# Unpack the run parameters supplied via the knitr `params` list into
# top-level variables used throughout the analysis.
output.var <- params$output.var
transform.abs <- FALSE
log.pred <- params$log.pred
norm.pred <- FALSE
algo.forward.caret <- params$algo.forward.caret
algo.backward.caret <- params$algo.backward.caret
algo.stepwise.caret <- params$algo.stepwise.caret
algo.LASSO.caret <- params$algo.LASSO.caret
algo.LARS.caret <- params$algo.LARS.caret
# Echo the parameters so the rendered report records the configuration.
message("Parameters used for training/prediction: ")
## Parameters used for training/prediction:
str(params)
## List of 7
## $ output.var : chr "y3"
## $ log.pred : logi TRUE
## $ algo.forward.caret : logi TRUE
## $ algo.backward.caret: logi TRUE
## $ algo.stepwise.caret: logi TRUE
## $ algo.LASSO.caret : logi TRUE
## $ algo.LARS.caret : logi TRUE
# Setup Labels
# Name of the (possibly transformed) response column: when log.pred is TRUE
# the model is trained on log10(y), stored as "<output.var>.log".
# BUG FIX: the original else branch re-assigned the variable inside itself
# (`else output.var.tr = output.var`), a redundant double assignment; the
# else branch now simply yields output.var.
output.var.tr <- if (log.pred == TRUE) paste0(output.var,'.log') else output.var
# Load features and labels, join them on JobName, and keep only complete
# cases for modelling; report how many rows each step retains.
feat <- read.csv('../../Data/features_highprec.csv')
labels <- read.csv('../../Data/labels.csv')
predictors <- names(dplyr::select(feat, -JobName))
data.ori <- inner_join(feat, labels, by = 'JobName')
#data.ori = inner_join(feat,select_at(labels,c('JobName',output.var)),by='JobName')
cc <- complete.cases(data.ori)
data.notComplete <- data.ori[!cc, ]
data <- data.ori[cc, ] %>% select_at(c(predictors, output.var, 'JobName'))
message('Original cases: ', nrow(data.ori))
## Original cases: 10000
message('Non-Complete cases: ', nrow(data.notComplete))
## Non-Complete cases: 3020
message('Complete cases: ', nrow(data))
## Complete cases: 6980
The Output Variable y3 shows right skewness, so we will proceed with a log transformation
# Distribution of the raw output variable: histogram with density overlay.
ggplot(gather(select_at(data,output.var)), aes(value)) +
geom_histogram(aes(y=..density..),bins = 50,fill='light blue') +
geom_density() +
facet_wrap(~key, scales = 'free',ncol=4)
# Normal Q-Q plot of the raw output variable.
ggplot(gather(select_at(data,output.var)), aes(sample=value)) +
stat_qq() +
facet_wrap(~key, scales = 'free',ncol=4)
# Apply the base-10 log transform when requested; otherwise copy the
# response column unchanged into the transformed-name column.
if(log.pred==TRUE) data[[output.var.tr]] = log(data[[output.var]],10) else
data[[output.var.tr]] = data[[output.var]]
# Compare original vs transformed distributions side by side.
ggplot(gather(select_at(data,c(output.var,output.var.tr))), aes(value)) +
geom_histogram(aes(y=..density..),bins = 50,fill='light blue') +
geom_density() +
facet_wrap(~key, scales = 'free',ncol=2)
# Q-Q plots of original vs transformed output.
ggplot(gather(select_at(data,c(output.var,output.var.tr))), aes(sample=value)) +
stat_qq() +
facet_wrap(~key, scales = 'free',ncol=4)
Normalization of y3 using the bestNormalize package (suggested: orderNorm). This is interesting, but I think it goes beyond the objective of the project
# Fit every candidate normalising transformation and pick the best one
# (lowest Pearson P / df statistic, estimated out-of-sample via CV).
t=bestNormalize::bestNormalize(data[[output.var]])
t
## Best Normalizing transformation with 6980 Observations
## Estimated Normality Statistics (Pearson P / df, lower => more normal):
## - No transform: 2.8788
## - Box-Cox: 1.3966
## - Log_b(x+a): 1.9242
## - sqrt(x+a): 2.3331
## - exp(x): 748.7106
## - arcsinh(x): 1.9245
## - Yeo-Johnson: 1.1012
## - orderNorm: 1.0521
## Estimation method: Out-of-sample via CV with 10 folds and 5 repeats
##
## Based off these, bestNormalize chose:
## orderNorm Transformation with 6980 nonmissing obs and no ties
## - Original quantiles:
## 0% 25% 50% 75% 100%
## 95.913 118.289 124.030 131.059 193.726
# Q-Q plots before vs after the chosen (orderNorm) transformation.
qqnorm(data[[output.var]])
qqnorm(predict(t))
orderNorm() is a rank-based procedure by which the values of a vector are mapped to their percentile, which is then mapped to the same percentile of the normal distribution. Without the presence of ties, this essentially guarantees that the transformation leads to a normal distribution
All predictors show a Fat-Tail situation, where the two tails are very tall, with a low density around the mean. The orderNorm transformation can help (see [Best Normalizator] section)
Histograms
# Hand-picked subset of predictors for a closer look at their distributions.
cols = c('x11','x18','stat98','x7','stat110')
ggplot(gather(select_at(data,cols)), aes(value)) +
geom_histogram(aes(y=..density..),bins = 50,fill='light blue') +
geom_density() +
facet_wrap(~key, scales = 'free',ncol=3)
# ggplot(gather(select_at(data,cols)), aes(sample=value)) +
# stat_qq()+
# facet_wrap(~key, scales = 'free',ncol=2)
# Five-number summaries of the selected predictors.
lapply(select_at(data,cols),summary)
## $x11
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 9.000e-08 9.494e-08 1.001e-07 1.001e-07 1.052e-07 1.100e-07
##
## $x18
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 1.500 3.147 4.769 4.772 6.418 7.999
##
## $stat98
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## -2.998619 -1.551882 -0.015993 -0.005946 1.528405 2.999499
##
## $x7
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 0.700 1.266 1.854 1.852 2.446 3.000
##
## $stat110
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## -2.999543 -1.496865 -0.002193 -0.004129 1.504273 2.999563
Scatter plot vs. output variable **y3.log**
# Long format: one row per (predictor, value) pair, keeping the transformed
# output alongside, then scatter each predictor against it with a GAM smooth.
d = gather(dplyr::select_at(data,c(cols,output.var.tr)),key=target,value=value,-!!output.var.tr)
ggplot(data=d, aes_string(x='value',y=output.var.tr)) +
geom_point(color='light green',alpha=0.5) +
geom_smooth() +
facet_wrap(~target, scales = 'free',ncol=3)
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'
All indicators have a strong indication of Fat-Tails
# Histograms + density overlays for every predictor in one faceted panel.
ggplot(gather(select_at(data,predictors)), aes(value)) +
geom_histogram(aes(y=..density..),bins = 50,fill='light blue') +
geom_density() +
facet_wrap(~key, scales = 'free',ncol=4)
#chart.Correlation(select(data,-JobName), pch=21)
# Correlation of each predictor with the transformed output, rounded to 4
# decimals and sorted descending (the response itself is filtered out).
t=as.data.frame(round(cor(dplyr::select(data,-one_of(output.var.tr,'JobName'))
,select_at(data,output.var.tr)),4)) %>%
rownames_to_column(var='variable') %>% filter(variable != !!output.var) %>% arrange(-y3.log)
#DT::datatable(t)
message("Top Positive")
## Top Positive
# 20 predictors most positively correlated with the transformed output.
kable(head(arrange(t,desc(y3.log)),20))
| variable | y3.log |
|---|---|
| x18 | 0.3120 |
| x7 | 0.2091 |
| stat98 | 0.1784 |
| x9 | 0.1127 |
| x17 | 0.0611 |
| x16 | 0.0489 |
| x10 | 0.0472 |
| x21 | 0.0412 |
| x11 | 0.0322 |
| x8 | 0.0318 |
| stat156 | 0.0287 |
| stat23 | 0.0234 |
| stat100 | 0.0206 |
| stat144 | 0.0203 |
| stat59 | 0.0202 |
| stat60 | 0.0199 |
| stat195 | 0.0199 |
| stat141 | 0.0194 |
| stat73 | 0.0192 |
| stat197 | 0.0185 |
message("Top Negative")
## Top Negative
# 20 predictors most negatively correlated with the transformed output.
kable(head(arrange(t,y3.log),20))
| variable | y3.log |
|---|---|
| stat110 | -0.1594 |
| x4 | -0.0603 |
| stat13 | -0.0345 |
| stat41 | -0.0345 |
| stat14 | -0.0317 |
| stat149 | -0.0309 |
| stat113 | -0.0279 |
| stat4 | -0.0248 |
| stat106 | -0.0236 |
| stat146 | -0.0236 |
| stat186 | -0.0217 |
| stat91 | -0.0210 |
| stat214 | -0.0209 |
| stat5 | -0.0207 |
| stat22 | -0.0202 |
| stat39 | -0.0202 |
| stat175 | -0.0194 |
| stat187 | -0.0193 |
| stat128 | -0.0192 |
| stat37 | -0.0191 |
#chart.Correlation(select(data,-JobName), pch=21)
# Full pairwise correlation matrix of all columns (JobName excluded),
# rounded to 4 decimals; only a 10x10 corner is rendered below.
t=as.data.frame(round(cor(dplyr::select(data,-one_of('JobName'))),4))
#DT::datatable(t,options=list(scrollX=T))
message("Showing only 10 variables")
## Showing only 10 variables
kable(t[1:10,1:10])
| x1 | x2 | x3 | x4 | x5 | x6 | x7 | x8 | x9 | x10 | |
|---|---|---|---|---|---|---|---|---|---|---|
| x1 | 1.0000 | 0.0034 | -0.0028 | 0.0085 | 0.0068 | 0.0159 | 0.0264 | -0.0012 | 0.0142 | 0.0013 |
| x2 | 0.0034 | 1.0000 | -0.0057 | 0.0004 | -0.0094 | -0.0101 | 0.0089 | 0.0078 | 0.0049 | -0.0214 |
| x3 | -0.0028 | -0.0057 | 1.0000 | 0.0029 | 0.0046 | 0.0006 | -0.0105 | -0.0002 | 0.0167 | -0.0137 |
| x4 | 0.0085 | 0.0004 | 0.0029 | 1.0000 | -0.0059 | 0.0104 | 0.0098 | 0.0053 | 0.0061 | -0.0023 |
| x5 | 0.0068 | -0.0094 | 0.0046 | -0.0059 | 1.0000 | 0.0016 | -0.0027 | 0.0081 | 0.0259 | -0.0081 |
| x6 | 0.0159 | -0.0101 | 0.0006 | 0.0104 | 0.0016 | 1.0000 | 0.0200 | -0.0157 | 0.0117 | -0.0072 |
| x7 | 0.0264 | 0.0089 | -0.0105 | 0.0098 | -0.0027 | 0.0200 | 1.0000 | -0.0018 | -0.0069 | -0.0221 |
| x8 | -0.0012 | 0.0078 | -0.0002 | 0.0053 | 0.0081 | -0.0157 | -0.0018 | 1.0000 | 0.0142 | -0.0004 |
| x9 | 0.0142 | 0.0049 | 0.0167 | 0.0061 | 0.0259 | 0.0117 | -0.0069 | 0.0142 | 1.0000 | 0.0149 |
| x10 | 0.0013 | -0.0214 | -0.0137 | -0.0023 | -0.0081 | -0.0072 | -0.0221 | -0.0004 | 0.0149 | 1.0000 |
Scatter plots with all predictors and the output variable (y3.log)
# Scatter of every predictor against the transformed output with a GAM smooth.
d = gather(dplyr::select_at(data,c(predictors,output.var.tr)),key=target,value=value,-!!output.var.tr)
ggplot(data=d, aes_string(x='value',y=output.var.tr)) +
geom_point(color='light blue',alpha=0.5) +
geom_smooth() +
facet_wrap(~target, scales = 'free',ncol=4)
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'
No Multicollinearity among predictors
Showing top predictors by VIF value
# Variance Inflation Factors for all predictors (usdm::vif), highest first;
# values near 1 indicate no multicollinearity.
vifDF = usdm::vif(select_at(data,predictors)) %>% arrange(desc(VIF))
head(vifDF,15)
## Variables VIF
## 1 stat174 1.064302
## 2 stat150 1.062254
## 3 stat147 1.059959
## 4 stat46 1.059717
## 5 stat178 1.059640
## 6 stat35 1.058976
## 7 stat169 1.058927
## 8 stat19 1.058803
## 9 stat6 1.058525
## 10 stat95 1.058466
## 11 stat72 1.058419
## 12 x3 1.058266
## 13 stat202 1.058198
## 14 stat205 1.058128
## 15 stat128 1.058010
# Feature engineering: x18 is right-skewed, so add a sqrt-transformed copy.
data.tr <- data %>%
  mutate(x18.sqrt = sqrt(x18))
cols <- c('x18','x18.sqrt')
# ggplot(gather(select_at(data.tr,cols)), aes(value)) +
# geom_histogram(aes(y=..density..),bins = 50,fill='light blue') +
# geom_density() +
# facet_wrap(~key, scales = 'free',ncol=4)
# Visual check: original vs sqrt-transformed x18 against the output.
d <- gather(dplyr::select_at(data.tr, c(cols, output.var.tr)), key = target, value = value, -!!output.var.tr)
ggplot(data = d, aes_string(x = 'value', y = output.var.tr)) +
  geom_point(color = 'light blue', alpha = 0.5) +
  geom_smooth() +
  facet_wrap(~target, scales = 'free', ncol = 4)
## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'
# Drop the untransformed x18, the raw response and the row id before modelling.
data.tr <- data.tr %>%
  dplyr::select_at(names(data.tr)[! names(data.tr) %in% c('x18','y3','JobName')])
data <- data.tr
label.names <- output.var.tr
# Shuffle rows, then take a stratified 80/20 train/test split (caTools).
data <- data[sample(nrow(data)), ]
split <- sample.split(data[, label.names], SplitRatio = 0.8)
data.train <- subset(data, split == TRUE)
data.test <- subset(data, split == FALSE)
# Produce standard regression diagnostics for a fitted model and return its
# Cook's distances.
#
# model: a fitted model object (e.g. from lm()).
# train: the data.frame the model was fitted on; used for predictions and
#        for the n in the 4/n Cook's distance rule of thumb.
#
# Side effects: draws the base plot(model) panels, studentized and
# standardized residual plots, a histogram of studentized residuals with a
# standard-normal reference curve, a leverage plot and a Cook's distance
# plot, and prints counts of influential points.
# (The original also computed an unused `residuals` local, which shadowed
# the base function name; it has been removed.)
plot.diagnostics <- function(model, train) {
  plot(model)
  r.standard <- rstandard(model)
  r.student <- rstudent(model)
  plot(predict(model, train), r.student,
       ylab = "Student Residuals", xlab = "Predicted Values",
       main = "Student Residual Plot")
  abline(0, 0)
  plot(predict(model, train), r.standard,
       ylab = "Standard Residuals", xlab = "Predicted Values",
       main = "Standard Residual Plot")
  abline(0, 0)
  # +/- 2 bands: standardized residuals outside are potential outliers
  abline(2, 0)
  abline(-2, 0)
  # Histogram of studentized residuals with a N(0, 1) reference curve
  hist(r.student, freq = FALSE, main = "Distribution of Studentized Residuals",
       xlab = "Studentized Residuals", ylab = "Density", ylim = c(0, 0.5))
  # Create range of x-values for normal curve
  xfit <- seq(min(r.student) - 1, max(r.student) + 1, length = 40)
  # Generate values from the normal distribution at the specified values
  yfit <- dnorm(xfit)
  # Add the normal curve
  lines(xfit, yfit, ylim = c(0, 0.5))
  # http://www.stat.columbia.edu/~martin/W2024/R7.pdf
  # Influential plots
  inf.meas <- influence.measures(model)
  # print (summary(inf.meas)) # too much data
  # Leverage plot
  lev <- hat(model.matrix(model))
  plot(lev, ylab = 'Leverage - check')
  # Cook's Distance with the 4/n and 1 reference lines
  cd <- cooks.distance(model)
  plot(cd, ylab = "Cooks distances")
  abline(4/nrow(train), 0)
  abline(1, 0)
  print(paste("Number of data points that have Cook's D > 4/n: ", length(cd[cd > 4/nrow(train)]), sep = ""))
  print(paste("Number of data points that have Cook's D > 1: ", length(cd[cd > 1]), sep = ""))
  return(cd)
}
# function to set up random seeds
# Based on http://jaehyeon-kim.github.io/2015/05/Setup-Random-Seeds-on-Caret-Package.html
# Builds the `seeds` list expected by caret::trainControl so that (parallel)
# resampling is reproducible.
# method:  resampling method; only "cv" and "repeatedcv" are supported.
# numbers: number of folds.
# repeats: number of repeats (used for "repeatedcv" only).
# tunes:   tuning-grid length, i.e. extra seeds needed per resample.
# seed:    master seed used to generate the per-resample seeds.
# Returns a list of length B + 1 (one integer vector of length
# numbers + tunes per resample, plus a single seed for the final model fit),
# or NULL when the method is not recognised.
setCaretSeeds <- function(method = "cv", numbers = 1, repeats = 1, tunes = NULL, seed = 1701) {
  # B is the number of resamples; each entry is an integer vector of
  # length M (numbers + tune length, if any)
  B <- if (method == "cv") numbers
       else if (method == "repeatedcv") numbers * repeats
       else NULL
  # BUG FIX: was `if (is.null(length))`, which tested the base function
  # `length` and was therefore never TRUE; unsupported methods then crashed
  # in vector(length = NULL) instead of returning NULL.
  if (is.null(B)) {
    seeds <- NULL
  } else {
    set.seed(seed = seed)
    seeds <- vector(mode = "list", length = B)
    seeds <- lapply(seeds, function(x) sample.int(n = 1000000
                    , size = numbers + ifelse(is.null(tunes), 0, tunes)))
    # one extra seed for the final model fit
    seeds[[length(seeds) + 1]] <- sample.int(n = 1000000, size = 1)
  }
  # return seeds
  seeds
}
# Train a linear-model variable-selection / shrinkage method through caret
# and produce diagnostics for the chosen model.
#
# formula:       full model formula (response ~ all candidate predictors).
# data:          training data.frame.
# method:        caret method: 'leapForward', 'leapBackward', 'leapSeq',
#                'glmnet' (with subopt = 'LASSO'), or 'lars'.
# subopt:        sub-option qualifying `method` (only 'LASSO' is used).
# feature.names: candidate predictor names (sizes the leap nvmax grid).
# train.control: optional trainControl; defaults to seeded 10-fold CV.
# tune.grid:     optional tuning grid; a method-specific default is built.
# pre.proc:      optional preProcess spec; 'lars' forces center/scale.
#
# Returns a list with the fitted model, the best-model id (leap methods
# only) and the diagnostic ggplot objects. Training runs on a parallel
# cluster that is stopped before returning.
train.caret.glmselect = function(formula, data, method
,subopt = NULL, feature.names
, train.control = NULL, tune.grid = NULL, pre.proc = NULL){
# Default resampling setup: seeded 10-fold CV so runs are reproducible.
if(is.null(train.control)){
train.control <- trainControl(method = "cv"
,number = 10
,seeds = setCaretSeeds(method = "cv"
, numbers = 10
, seed = 1701)
,search = "grid"
,verboseIter = TRUE
,allowParallel = TRUE
)
}
# Method-specific default tuning grids when the caller supplies none.
if(is.null(tune.grid)){
if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
tune.grid = data.frame(nvmax = 1:length(feature.names))
}
if (method == 'glmnet' && subopt == 'LASSO'){
# Will only show 1 Lambda value during training, but that is OK
# https://stackoverflow.com/questions/47526544/why-need-to-tune-lambda-with-carettrain-method-glmnet-and-cv-glmnet
# Another option for LASSO is this: https://github.com/topepo/caret/blob/master/RegressionTests/Code/lasso.R
lambda = 10^seq(-2,0, length =100)
alpha = c(1)
tune.grid = expand.grid(alpha = alpha,lambda = lambda)
}
if (method == 'lars'){
# https://github.com/topepo/caret/blob/master/RegressionTests/Code/lars.R
fraction = seq(0, 1, length = 100)
tune.grid = expand.grid(fraction = fraction)
pre.proc = c("center", "scale")
}
}
# http://sshaikh.org/2015/05/06/parallelize-machine-learning-in-r-with-multi-core-cpus/
cl <- makeCluster(detectCores()*0.75) # use 75% of cores only, leave rest for other tasks
registerDoParallel(cl)
set.seed(1)
# note that the seed has to actually be set just before this function is called
# setting it above does not ensure reproducibility for some reason
model.caret <- caret::train(formula
, data = data
, method = method
, tuneGrid = tune.grid
, trControl = train.control
, preProc = pre.proc
)
stopCluster(cl)
registerDoSEQ() # register sequential engine in case you are not using this function anymore
# --- leap* (regsubsets) post-processing: metrics, residuals, refit lm ---
if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
print("All models results")
print(model.caret$results) # all model results
print("Best Model")
print(model.caret$bestTune) # best model
model = model.caret$finalModel
# Metrics Plot
dataPlot = model.caret$results %>%
gather(key='metric',value='value',-nvmax) %>%
dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
metricsPlot = ggplot(data=dataPlot,aes(x=nvmax,y=value) ) +
geom_line(color='lightblue4') +
geom_point(color='blue',alpha=0.7,size=.9) +
facet_wrap(~metric,ncol=4,scales='free_y')+
theme_light()
plot(metricsPlot)
# Residuals Plot
# leap function does not support studentized residuals
dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
geom_point(color='light blue',alpha=0.7) +
geom_smooth(method="lm")+
theme_light()
plot(residPlot)
residHistogram = ggplot(dataPlot,aes(x=res)) +
geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
geom_density(color='lightblue4') +
theme_light()
plot(residHistogram)
id = rownames(model.caret$bestTune)
# Provides the coefficients of the best model
# regsubsets does not return a full model (see regsubsets documentation), so we need to recalculate the model
# https://stackoverflow.com/questions/13063762/how-to-obtain-a-lm-object-from-regsubsets
print("Coefficients of final model:")
coefs <- coef(model, id=id)
# refit as lm to get the coefficient confidence intervals
nams <- names(coefs)
nams <- nams[!nams %in% "(Intercept)"]
response <- as.character(formula[[2]])
form <- as.formula(paste(response, paste(nams, collapse = " + "), sep = " ~ "))
mod <- lm(form, data = data)
#coefs
#coef(mod)
print(car::Confint(mod))
return(list(model = model,id = id, residPlot = residPlot, residHistogram=residHistogram
,modelLM=mod))
}
# --- glmnet/LASSO post-processing: metrics, residuals, nonzero coefs ---
if (method == 'glmnet' && subopt == 'LASSO'){
print(model.caret)
print(plot(model.caret))
print(model.caret$bestTune)
print(model.caret$results)
model=model.caret$finalModel
# Metrics Plot
dataPlot = model.caret$results %>%
gather(key='metric',value='value',-lambda) %>%
dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
metricsPlot = ggplot(data=dataPlot,aes(x=lambda,y=value) ) +
geom_line(color='lightblue4') +
geom_point(color='blue',alpha=0.7,size=.9) +
facet_wrap(~metric,ncol=4,scales='free_y')+
theme_light()
plot(metricsPlot)
# Residuals Plot
dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
geom_point(color='light blue',alpha=0.7) +
geom_smooth(method="lm")+
theme_light()
plot(residPlot)
residHistogram = ggplot(dataPlot,aes(x=res)) +
geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
geom_density(color='lightblue4') +
theme_light()
plot(residHistogram)
print("Coefficients")
#no interval for glmnet: https://stackoverflow.com/questions/39750965/confidence-intervals-for-ridge-regression
t=coef(model,s=model.caret$bestTune$lambda)
model.coef = t[which(t[,1]!=0),]
print(as.data.frame(model.coef))
id = NULL # not really needed but added for consistency
return(list(model = model.caret,id = id, residPlot = residPlot, metricsPlot=metricsPlot ))
}
# --- lars post-processing: metrics, residuals, nonzero coefs ---
if (method == 'lars'){
print(model.caret)
print(plot(model.caret))
print(model.caret$bestTune)
# Metrics Plot
dataPlot = model.caret$results %>%
gather(key='metric',value='value',-fraction) %>%
dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
metricsPlot = ggplot(data=dataPlot,aes(x=fraction,y=value) ) +
geom_line(color='lightblue4') +
geom_point(color='blue',alpha=0.7,size=.9) +
facet_wrap(~metric,ncol=4,scales='free_y')+
theme_light()
plot(metricsPlot)
# Residuals Plot
dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
geom_point(color='light blue',alpha=0.7) +
geom_smooth(method="lm")+
theme_light()
plot(residPlot)
residHistogram = ggplot(dataPlot,aes(x=res)) +
geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
geom_density(color='lightblue4') +
theme_light()
plot(residHistogram)
print("Coefficients")
t=coef(model.caret$finalModel,s=model.caret$bestTune$fraction,mode='fraction')
model.coef = t[which(t!=0)]
print(model.coef)
id = NULL # not really needed but added for consistency
return(list(model = model.caret,id = id, residPlot = residPlot, residHistogram=residHistogram))
}
}
# https://stackoverflow.com/questions/48265743/linear-model-subset-selection-goodness-of-fit-with-k-fold-cross-validation
# changed slightly since call[[2]] was just returning "formula" without actually returning the value in formula
# Predict from a regsubsets fit: build the full design matrix for `newdata`
# from `formula` (adds the intercept and expands interaction terms), select
# the columns belonging to the size-`id` model, and multiply by its
# coefficients. Returns a one-column matrix of predictions.
predict.regsubsets <- function(object, newdata, id, formula, ...) {
  #form <- as.formula(object$call[[2]])
  design <- model.matrix(formula, newdata)
  beta <- coef(object, id = id)
  design[, names(beta)] %*% beta
}
# Evaluate a fitted model on a held-out test set: print the test MSE and
# draw actual-vs-predicted plots with +/- `good` and +/- `ok` tolerance bands.
#
# model:         fitted model (lm, regsubsets final model, glmnet, or lars).
# test:          test data.frame.
# level:         confidence level for the lm prediction interval.
# draw.limits, good, ok: tolerance-band settings for the final plot.
# method/subopt: NULL for a plain lm; otherwise the caret method name
#                ('leapForward'/'leapBackward'/'leapSeq', 'glmnet'+'LASSO',
#                or 'lars').
# id:            model size for regsubsets prediction.
# formula:       full model formula (needed for regsubsets).
# feature.names/label.names: predictor / response column names.
# transformation: bestNormalize object applied when norm.pred is TRUE.
#
# NOTE(review): reads the globals `log.pred` and `norm.pred` to decide how
# to back-transform predictions before the final plot.
test.model = function(model, test, level=0.95
                      ,draw.limits = FALSE, good = 0.1, ok = 0.15
                      ,method = NULL, subopt = NULL
                      ,id = NULL, formula, feature.names, label.names
                      ,transformation = NULL){
  ## if using caret for glm select equivalent functionality,
  ## need to pass formula (full is ok as it will select subset of variables from there)
  # BUG FIX: these branches were independent `if`s, so with method = NULL the
  # comparisons `method == '...'` were evaluated on NULL (a zero-length
  # condition) and crashed; they are now a proper else-if chain.
  if (is.null(method)){
    pred = predict(model, newdata=test, interval="confidence", level = level)
  } else if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
    pred = predict.regsubsets(model, newdata = test, id = id, formula = formula)
  } else if (method == 'glmnet' && subopt == 'LASSO'){
    xtest = as.matrix(test[,feature.names])
    pred=as.data.frame(predict(model, xtest))
  } else if (method == 'lars'){
    pred=as.data.frame(predict(model, newdata = test))
  }
  # Summary of predicted values
  print ("Summary of predicted values: ")
  print(summary(pred[,1]))
  test.mse = mean((test[,label.names]-pred[,1])^2)
  print (paste(method, subopt, "Test MSE:", test.mse, sep=" "))
  if(log.pred == TRUE || norm.pred == TRUE){
    # plot the transformed-scale comparison first
    plot(test[,label.names],pred[,1],xlab = "Actual (Transformed)", ylab = "Predicted (Transformed)")
    abline(coef=c(0,1),col='blue')
  }
  # Back-transform to the original scale for the final comparison plot.
  if (log.pred == FALSE && norm.pred == FALSE){
    x = test[,label.names]
    y = pred[,1]
  }
  if (log.pred == TRUE){
    x = 10^test[,label.names]
    y = 10^pred[,1]
  }
  if (norm.pred == TRUE){
    x = predict(transformation, test[,label.names], inverse = TRUE)
    y = predict(transformation, pred[,1], inverse = TRUE)
  }
  plot(x, y, xlab = "Actual", ylab = "Predicted")
  # Green lines: within `good` relative error; blue lines: within `ok`.
  abline(0,(1+good),col='green', lwd = 3)
  abline(0,(1-good),col='green', lwd = 3)
  abline(0,(1+ok),col='blue', lwd = 3)
  abline(0,(1-ok),col='blue', lwd = 3)
}
# Build the full model formula: response = label column(s), predictors = all
# remaining columns of the training set.
n <- names(data.train)
formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + ")
," ~", paste(n[!n %in% label.names], collapse = " + ")))
# Intercept-only (grand-mean) baseline formula for comparison.
grand.mean.formula = as.formula(paste(paste(n[n %in% label.names], collapse = " + ")," ~ 1"))
print(formula)
## y3.log ~ x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + x11 +
## x12 + x13 + x14 + x15 + x16 + x17 + x19 + x20 + x21 + x22 +
## x23 + stat1 + stat2 + stat3 + stat4 + stat5 + stat6 + stat7 +
## stat8 + stat9 + stat10 + stat11 + stat12 + stat13 + stat14 +
## stat15 + stat16 + stat17 + stat18 + stat19 + stat20 + stat21 +
## stat22 + stat23 + stat24 + stat25 + stat26 + stat27 + stat28 +
## stat29 + stat30 + stat31 + stat32 + stat33 + stat34 + stat35 +
## stat36 + stat37 + stat38 + stat39 + stat40 + stat41 + stat42 +
## stat43 + stat44 + stat45 + stat46 + stat47 + stat48 + stat49 +
## stat50 + stat51 + stat52 + stat53 + stat54 + stat55 + stat56 +
## stat57 + stat58 + stat59 + stat60 + stat61 + stat62 + stat63 +
## stat64 + stat65 + stat66 + stat67 + stat68 + stat69 + stat70 +
## stat71 + stat72 + stat73 + stat74 + stat75 + stat76 + stat77 +
## stat78 + stat79 + stat80 + stat81 + stat82 + stat83 + stat84 +
## stat85 + stat86 + stat87 + stat88 + stat89 + stat90 + stat91 +
## stat92 + stat93 + stat94 + stat95 + stat96 + stat97 + stat98 +
## stat99 + stat100 + stat101 + stat102 + stat103 + stat104 +
## stat105 + stat106 + stat107 + stat108 + stat109 + stat110 +
## stat111 + stat112 + stat113 + stat114 + stat115 + stat116 +
## stat117 + stat118 + stat119 + stat120 + stat121 + stat122 +
## stat123 + stat124 + stat125 + stat126 + stat127 + stat128 +
## stat129 + stat130 + stat131 + stat132 + stat133 + stat134 +
## stat135 + stat136 + stat137 + stat138 + stat139 + stat140 +
## stat141 + stat142 + stat143 + stat144 + stat145 + stat146 +
## stat147 + stat148 + stat149 + stat150 + stat151 + stat152 +
## stat153 + stat154 + stat155 + stat156 + stat157 + stat158 +
## stat159 + stat160 + stat161 + stat162 + stat163 + stat164 +
## stat165 + stat166 + stat167 + stat168 + stat169 + stat170 +
## stat171 + stat172 + stat173 + stat174 + stat175 + stat176 +
## stat177 + stat178 + stat179 + stat180 + stat181 + stat182 +
## stat183 + stat184 + stat185 + stat186 + stat187 + stat188 +
## stat189 + stat190 + stat191 + stat192 + stat193 + stat194 +
## stat195 + stat196 + stat197 + stat198 + stat199 + stat200 +
## stat201 + stat202 + stat203 + stat204 + stat205 + stat206 +
## stat207 + stat208 + stat209 + stat210 + stat211 + stat212 +
## stat213 + stat214 + stat215 + stat216 + stat217 + x18.sqrt
print(grand.mean.formula)
## y3.log ~ 1
# Update feature.names because we may have transformed some features
feature.names = n[!n %in% label.names]
# Fit the full OLS model on the training set as a baseline for comparison
# against the variable-selection methods.
model.full = lm(formula , data.train)
summary(model.full)
##
## Call:
## lm(formula = formula, data = data.train)
##
## Residuals:
## Min 1Q Median 3Q Max
## -0.084270 -0.020887 -0.004679 0.016324 0.192421
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 1.964e+00 9.567e-03 205.264 < 2e-16 ***
## x1 3.753e-05 6.551e-04 0.057 0.954317
## x2 2.546e-04 4.200e-04 0.606 0.544424
## x3 -4.489e-05 1.151e-04 -0.390 0.696427
## x4 -5.191e-05 9.107e-06 -5.700 1.26e-08 ***
## x5 4.627e-05 2.971e-04 0.156 0.876224
## x6 2.467e-05 5.966e-04 0.041 0.967015
## x7 1.160e-02 6.401e-04 18.126 < 2e-16 ***
## x8 3.561e-04 1.496e-04 2.380 0.017356 *
## x9 3.273e-03 3.327e-04 9.837 < 2e-16 ***
## x10 1.439e-03 3.093e-04 4.653 3.35e-06 ***
## x11 2.285e+05 7.408e+04 3.085 0.002047 **
## x12 -3.337e-05 1.891e-04 -0.176 0.859989
## x13 5.557e-05 7.523e-05 0.739 0.460115
## x14 -4.703e-04 3.247e-04 -1.448 0.147558
## x15 8.258e-05 3.104e-04 0.266 0.790195
## x16 9.426e-04 2.144e-04 4.396 1.12e-05 ***
## x17 1.667e-03 3.281e-04 5.080 3.90e-07 ***
## x19 3.192e-04 1.668e-04 1.913 0.055799 .
## x20 -2.265e-04 1.151e-03 -0.197 0.844078
## x21 1.585e-04 4.231e-05 3.746 0.000182 ***
## x22 -1.857e-04 3.464e-04 -0.536 0.591929
## x23 -1.755e-04 3.302e-04 -0.531 0.595166
## stat1 -8.102e-05 2.513e-04 -0.322 0.747132
## stat2 1.793e-04 2.477e-04 0.724 0.469244
## stat3 3.246e-04 2.502e-04 1.297 0.194589
## stat4 -5.361e-04 2.514e-04 -2.132 0.033050 *
## stat5 -7.079e-05 2.495e-04 -0.284 0.776623
## stat6 -1.708e-04 2.493e-04 -0.685 0.493322
## stat7 -3.447e-04 2.495e-04 -1.382 0.167084
## stat8 4.211e-04 2.491e-04 1.690 0.091004 .
## stat9 -4.007e-05 2.498e-04 -0.160 0.872552
## stat10 -3.630e-04 2.500e-04 -1.452 0.146454
## stat11 -3.163e-04 2.521e-04 -1.255 0.209528
## stat12 3.063e-04 2.479e-04 1.236 0.216586
## stat13 -6.601e-04 2.481e-04 -2.660 0.007830 **
## stat14 -1.024e-03 2.478e-04 -4.134 3.61e-05 ***
## stat15 -2.154e-04 2.478e-04 -0.869 0.384789
## stat16 1.297e-04 2.498e-04 0.519 0.603682
## stat17 -7.625e-05 2.467e-04 -0.309 0.757256
## stat18 -4.193e-04 2.490e-04 -1.684 0.092201 .
## stat19 1.981e-04 2.483e-04 0.798 0.425017
## stat20 -2.986e-04 2.491e-04 -1.199 0.230564
## stat21 -1.034e-04 2.500e-04 -0.414 0.679204
## stat22 -4.834e-04 2.496e-04 -1.937 0.052823 .
## stat23 4.457e-04 2.489e-04 1.791 0.073401 .
## stat24 -4.723e-04 2.512e-04 -1.881 0.060068 .
## stat25 -3.752e-04 2.488e-04 -1.508 0.131528
## stat26 -2.645e-04 2.490e-04 -1.063 0.288017
## stat27 3.282e-04 2.486e-04 1.321 0.186709
## stat28 4.716e-05 2.486e-04 0.190 0.849529
## stat29 2.858e-04 2.513e-04 1.137 0.255497
## stat30 3.608e-04 2.528e-04 1.427 0.153617
## stat31 9.486e-05 2.514e-04 0.377 0.705976
## stat32 -2.828e-05 2.514e-04 -0.113 0.910430
## stat33 -2.987e-04 2.483e-04 -1.203 0.229105
## stat34 -4.335e-05 2.494e-04 -0.174 0.862017
## stat35 -2.733e-04 2.500e-04 -1.093 0.274286
## stat36 1.840e-04 2.470e-04 0.745 0.456316
## stat37 -2.395e-04 2.502e-04 -0.957 0.338598
## stat38 8.622e-05 2.503e-04 0.344 0.730545
## stat39 -1.508e-04 2.478e-04 -0.608 0.542894
## stat40 2.898e-04 2.485e-04 1.166 0.243603
## stat41 -3.423e-04 2.474e-04 -1.384 0.166408
## stat42 2.284e-04 2.481e-04 0.920 0.357357
## stat43 -2.561e-04 2.488e-04 -1.029 0.303442
## stat44 6.146e-05 2.501e-04 0.246 0.805933
## stat45 -1.661e-04 2.497e-04 -0.665 0.505862
## stat46 4.545e-04 2.489e-04 1.826 0.067967 .
## stat47 2.701e-04 2.506e-04 1.078 0.281133
## stat48 2.719e-04 2.499e-04 1.088 0.276618
## stat49 2.534e-04 2.478e-04 1.023 0.306560
## stat50 2.624e-04 2.483e-04 1.057 0.290759
## stat51 1.792e-04 2.508e-04 0.715 0.474843
## stat52 -7.772e-05 2.491e-04 -0.312 0.755068
## stat53 -1.731e-05 2.510e-04 -0.069 0.945000
## stat54 -2.559e-04 2.522e-04 -1.015 0.310325
## stat55 1.420e-04 2.473e-04 0.574 0.565822
## stat56 1.711e-04 2.503e-04 0.683 0.494335
## stat57 1.389e-04 2.461e-04 0.564 0.572656
## stat58 -4.820e-05 2.481e-04 -0.194 0.845983
## stat59 2.231e-04 2.486e-04 0.897 0.369566
## stat60 5.723e-04 2.502e-04 2.287 0.022217 *
## stat61 -3.785e-04 2.505e-04 -1.511 0.130817
## stat62 -7.639e-05 2.496e-04 -0.306 0.759559
## stat63 1.609e-04 2.496e-04 0.644 0.519345
## stat64 5.628e-05 2.488e-04 0.226 0.821041
## stat65 -3.687e-04 2.514e-04 -1.466 0.142647
## stat66 1.440e-04 2.533e-04 0.568 0.569744
## stat67 1.391e-05 2.509e-04 0.055 0.955795
## stat68 8.835e-05 2.491e-04 0.355 0.722802
## stat69 4.914e-05 2.492e-04 0.197 0.843688
## stat70 5.980e-05 2.492e-04 0.240 0.810323
## stat71 6.225e-05 2.491e-04 0.250 0.802634
## stat72 3.549e-04 2.499e-04 1.420 0.155591
## stat73 3.397e-04 2.498e-04 1.360 0.173947
## stat74 -1.966e-04 2.497e-04 -0.787 0.431241
## stat75 -1.799e-04 2.509e-04 -0.717 0.473340
## stat76 9.648e-05 2.491e-04 0.387 0.698537
## stat77 -2.049e-05 2.484e-04 -0.083 0.934250
## stat78 -1.670e-04 2.506e-04 -0.666 0.505282
## stat79 -1.250e-04 2.510e-04 -0.498 0.618498
## stat80 7.305e-06 2.502e-04 0.029 0.976705
## stat81 3.807e-05 2.507e-04 0.152 0.879315
## stat82 3.161e-04 2.497e-04 1.266 0.205538
## stat83 -3.892e-04 2.501e-04 -1.556 0.119666
## stat84 -1.027e-04 2.486e-04 -0.413 0.679577
## stat85 -1.224e-04 2.505e-04 -0.489 0.625115
## stat86 -3.569e-05 2.495e-04 -0.143 0.886272
## stat87 -2.440e-04 2.502e-04 -0.975 0.329438
## stat88 4.157e-05 2.474e-04 0.168 0.866550
## stat89 -1.942e-04 2.484e-04 -0.782 0.434409
## stat90 -1.349e-04 2.486e-04 -0.543 0.587436
## stat91 -3.840e-04 2.483e-04 -1.546 0.122138
## stat92 -4.778e-04 2.503e-04 -1.909 0.056320 .
## stat93 -1.447e-04 2.531e-04 -0.572 0.567445
## stat94 -1.588e-04 2.502e-04 -0.635 0.525762
## stat95 -4.992e-05 2.496e-04 -0.200 0.841480
## stat96 -5.012e-04 2.484e-04 -2.017 0.043704 *
## stat97 1.285e-04 2.479e-04 0.518 0.604236
## stat98 3.677e-03 2.458e-04 14.957 < 2e-16 ***
## stat99 3.027e-04 2.522e-04 1.200 0.230226
## stat100 7.155e-04 2.505e-04 2.856 0.004308 **
## stat101 -2.865e-04 2.526e-04 -1.134 0.256749
## stat102 1.537e-04 2.501e-04 0.615 0.538858
## stat103 -2.925e-04 2.516e-04 -1.163 0.245038
## stat104 -1.912e-04 2.478e-04 -0.772 0.440278
## stat105 1.064e-04 2.470e-04 0.431 0.666707
## stat106 -3.019e-04 2.507e-04 -1.204 0.228632
## stat107 -2.986e-04 2.471e-04 -1.208 0.227032
## stat108 -2.424e-04 2.492e-04 -0.973 0.330678
## stat109 -2.995e-04 2.492e-04 -1.201 0.229635
## stat110 -3.341e-03 2.472e-04 -13.518 < 2e-16 ***
## stat111 -1.863e-04 2.495e-04 -0.747 0.455357
## stat112 2.893e-05 2.500e-04 0.116 0.907874
## stat113 -3.143e-04 2.505e-04 -1.255 0.209607
## stat114 2.477e-04 2.487e-04 0.996 0.319458
## stat115 1.740e-04 2.496e-04 0.697 0.485900
## stat116 3.472e-04 2.495e-04 1.391 0.164140
## stat117 -1.056e-04 2.506e-04 -0.421 0.673604
## stat118 -3.167e-04 2.465e-04 -1.285 0.199005
## stat119 2.256e-05 2.482e-04 0.091 0.927581
## stat120 1.505e-04 2.475e-04 0.608 0.543062
## stat121 -6.510e-05 2.505e-04 -0.260 0.794990
## stat122 3.297e-05 2.486e-04 0.133 0.894502
## stat123 1.158e-04 2.533e-04 0.457 0.647620
## stat124 -1.758e-04 2.495e-04 -0.704 0.481227
## stat125 1.325e-04 2.494e-04 0.531 0.595182
## stat126 1.315e-04 2.489e-04 0.528 0.597194
## stat127 2.754e-04 2.487e-04 1.107 0.268300
## stat128 -2.167e-04 2.487e-04 -0.872 0.383479
## stat129 2.801e-04 2.484e-04 1.128 0.259534
## stat130 2.470e-04 2.506e-04 0.986 0.324417
## stat131 3.025e-04 2.495e-04 1.213 0.225291
## stat132 -1.310e-04 2.476e-04 -0.529 0.596796
## stat133 -3.458e-05 2.482e-04 -0.139 0.889202
## stat134 -1.031e-04 2.485e-04 -0.415 0.678221
## stat135 -5.921e-05 2.487e-04 -0.238 0.811829
## stat136 -1.962e-05 2.504e-04 -0.078 0.937543
## stat137 -4.409e-05 2.488e-04 -0.177 0.859316
## stat138 2.106e-04 2.481e-04 0.849 0.396006
## stat139 2.304e-04 2.518e-04 0.915 0.360262
## stat140 1.903e-04 2.485e-04 0.766 0.443779
## stat141 8.273e-05 2.477e-04 0.334 0.738398
## stat142 -1.681e-04 2.515e-04 -0.668 0.503886
## stat143 -1.514e-05 2.486e-04 -0.061 0.951431
## stat144 1.104e-04 2.486e-04 0.444 0.656869
## stat145 6.258e-05 2.532e-04 0.247 0.804794
## stat146 -7.478e-04 2.510e-04 -2.980 0.002900 **
## stat147 -4.528e-04 2.508e-04 -1.806 0.071014 .
## stat148 -5.320e-04 2.479e-04 -2.146 0.031958 *
## stat149 -4.307e-04 2.508e-04 -1.717 0.085960 .
## stat150 -2.725e-05 2.502e-04 -0.109 0.913277
## stat151 -2.918e-05 2.504e-04 -0.117 0.907209
## stat152 -2.460e-04 2.470e-04 -0.996 0.319277
## stat153 -1.520e-04 2.536e-04 -0.600 0.548856
## stat154 -3.719e-05 2.519e-04 -0.148 0.882622
## stat155 2.738e-04 2.493e-04 1.098 0.272204
## stat156 4.850e-04 2.506e-04 1.936 0.052945 .
## stat157 -7.551e-05 2.475e-04 -0.305 0.760355
## stat158 1.549e-04 2.515e-04 0.616 0.538052
## stat159 -5.162e-05 2.487e-04 -0.208 0.835556
## stat160 1.935e-04 2.503e-04 0.773 0.439463
## stat161 7.585e-05 2.506e-04 0.303 0.762152
## stat162 -8.382e-05 2.493e-04 -0.336 0.736753
## stat163 1.656e-04 2.514e-04 0.659 0.510114
## stat164 2.990e-04 2.525e-04 1.184 0.236425
## stat165 -5.746e-05 2.464e-04 -0.233 0.815625
## stat166 8.754e-06 2.465e-04 0.036 0.971672
## stat167 -5.479e-05 2.499e-04 -0.219 0.826503
## stat168 -3.103e-04 2.478e-04 -1.252 0.210623
## stat169 -1.230e-04 2.490e-04 -0.494 0.621413
## stat170 -1.097e-04 2.498e-04 -0.439 0.660663
## stat171 1.451e-04 2.512e-04 0.578 0.563474
## stat172 3.834e-04 2.478e-04 1.547 0.121903
## stat173 -1.797e-04 2.503e-04 -0.718 0.472868
## stat174 1.462e-04 2.496e-04 0.586 0.558146
## stat175 -3.910e-04 2.513e-04 -1.556 0.119774
## stat176 1.150e-04 2.502e-04 0.460 0.645865
## stat177 -1.341e-04 2.503e-04 -0.536 0.592190
## stat178 -2.490e-04 2.542e-04 -0.980 0.327359
## stat179 -2.147e-05 2.482e-04 -0.086 0.931073
## stat180 -1.797e-04 2.475e-04 -0.726 0.467864
## stat181 2.657e-04 2.511e-04 1.059 0.289866
## stat182 3.110e-04 2.523e-04 1.233 0.217713
## stat183 7.981e-05 2.470e-04 0.323 0.746629
## stat184 1.852e-04 2.501e-04 0.741 0.458990
## stat185 -1.287e-04 2.468e-04 -0.522 0.601945
## stat186 -1.686e-04 2.515e-04 -0.670 0.502683
## stat187 -3.241e-04 2.480e-04 -1.307 0.191298
## stat188 -1.924e-04 2.477e-04 -0.777 0.437341
## stat189 1.759e-04 2.501e-04 0.703 0.481810
## stat190 2.818e-04 2.483e-04 1.135 0.256428
## stat191 -2.591e-04 2.485e-04 -1.043 0.297165
## stat192 5.169e-06 2.512e-04 0.021 0.983584
## stat193 2.998e-05 2.524e-04 0.119 0.905447
## stat194 7.698e-05 2.483e-04 0.310 0.756532
## stat195 4.011e-04 2.501e-04 1.604 0.108844
## stat196 -1.110e-04 2.534e-04 -0.438 0.661467
## stat197 -1.750e-05 2.458e-04 -0.071 0.943264
## stat198 -1.833e-04 2.506e-04 -0.731 0.464545
## stat199 2.582e-04 2.476e-04 1.043 0.296995
## stat200 -2.173e-04 2.471e-04 -0.880 0.379152
## stat201 3.201e-05 2.486e-04 0.129 0.897559
## stat202 -4.217e-04 2.523e-04 -1.671 0.094732 .
## stat203 3.127e-05 2.481e-04 0.126 0.899708
## stat204 -3.796e-04 2.472e-04 -1.535 0.124777
## stat205 5.813e-05 2.485e-04 0.234 0.815081
## stat206 -8.203e-05 2.518e-04 -0.326 0.744628
## stat207 3.055e-04 2.474e-04 1.235 0.217043
## stat208 1.132e-04 2.499e-04 0.453 0.650742
## stat209 -1.474e-04 2.487e-04 -0.593 0.553526
## stat210 -1.837e-04 2.493e-04 -0.737 0.461180
## stat211 -7.206e-05 2.488e-04 -0.290 0.772095
## stat212 -2.248e-05 2.474e-04 -0.091 0.927616
## stat213 8.886e-05 2.500e-04 0.355 0.722307
## stat214 -3.070e-04 2.498e-04 -1.229 0.219152
## stat215 -1.531e-04 2.505e-04 -0.611 0.540925
## stat216 -1.137e-04 2.500e-04 -0.455 0.649258
## stat217 3.294e-04 2.504e-04 1.315 0.188437
## x18.sqrt 2.653e-02 9.464e-04 28.032 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.03158 on 5343 degrees of freedom
## Multiple R-squared: 0.2757, Adjusted R-squared: 0.2432
## F-statistic: 8.476 on 240 and 5343 DF, p-value: < 2.2e-16
# Cook's distance diagnostics for the full model on the training data
cd.full <- plot.diagnostics(model.full, data.train)
## [1] "Number of data points that have Cook's D > 4/n: 284"
## [1] "Number of data points that have Cook's D > 1: 0"
# Drop observations whose Cook's D exceeds the conventional 4/n cutoff,
# then refit the full model on the reduced training set.
high.cd <- names(cd.full[cd.full > 4 / nrow(data.train)])
data.train2 <- data.train[!(rownames(data.train) %in% high.cd), ]
model.full2 <- lm(formula, data = data.train2)
summary(model.full2)
##
## Call:
## lm(formula = formula, data = data.train2)
##
## Residuals:
## Min 1Q Median 3Q Max
## -0.058186 -0.017664 -0.002528 0.016290 0.069788
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 1.948e+00 7.867e-03 247.625 < 2e-16 ***
## x1 -6.377e-05 5.386e-04 -0.118 0.905767
## x2 2.759e-04 3.445e-04 0.801 0.423264
## x3 -1.922e-05 9.402e-05 -0.204 0.838044
## x4 -5.981e-05 7.502e-06 -7.972 1.91e-15 ***
## x5 4.093e-04 2.436e-04 1.680 0.092950 .
## x6 -3.784e-04 4.896e-04 -0.773 0.439593
## x7 1.217e-02 5.249e-04 23.190 < 2e-16 ***
## x8 4.293e-04 1.230e-04 3.491 0.000485 ***
## x9 3.296e-03 2.725e-04 12.093 < 2e-16 ***
## x10 1.873e-03 2.542e-04 7.369 2.00e-13 ***
## x11 2.875e+05 6.094e+04 4.717 2.45e-06 ***
## x12 5.809e-05 1.547e-04 0.376 0.707213
## x13 8.143e-05 6.183e-05 1.317 0.187927
## x14 -3.772e-04 2.659e-04 -1.419 0.156013
## x15 1.344e-04 2.546e-04 0.528 0.597683
## x16 1.075e-03 1.760e-04 6.108 1.09e-09 ***
## x17 1.762e-03 2.694e-04 6.541 6.71e-11 ***
## x19 2.156e-04 1.371e-04 1.573 0.115750
## x20 -2.731e-04 9.462e-04 -0.289 0.772870
## x21 1.471e-04 3.477e-05 4.230 2.38e-05 ***
## x22 -3.615e-04 2.837e-04 -1.274 0.202713
## x23 -2.388e-05 2.716e-04 -0.088 0.929919
## stat1 -2.044e-04 2.060e-04 -0.992 0.321090
## stat2 1.556e-04 2.030e-04 0.767 0.443366
## stat3 3.794e-04 2.052e-04 1.849 0.064496 .
## stat4 -6.022e-04 2.072e-04 -2.906 0.003671 **
## stat5 -2.007e-04 2.051e-04 -0.979 0.327713
## stat6 -1.305e-04 2.045e-04 -0.638 0.523420
## stat7 -3.877e-04 2.044e-04 -1.896 0.057964 .
## stat8 2.705e-04 2.043e-04 1.324 0.185451
## stat9 -1.525e-05 2.056e-04 -0.074 0.940859
## stat10 -2.589e-04 2.050e-04 -1.263 0.206600
## stat11 -4.472e-04 2.073e-04 -2.157 0.031055 *
## stat12 1.503e-04 2.036e-04 0.738 0.460322
## stat13 -6.610e-04 2.035e-04 -3.248 0.001168 **
## stat14 -9.847e-04 2.032e-04 -4.846 1.30e-06 ***
## stat15 -4.366e-04 2.037e-04 -2.143 0.032129 *
## stat16 -5.448e-05 2.048e-04 -0.266 0.790233
## stat17 -2.621e-04 2.028e-04 -1.292 0.196352
## stat18 -3.863e-04 2.043e-04 -1.891 0.058621 .
## stat19 2.872e-04 2.043e-04 1.405 0.159974
## stat20 1.593e-04 2.046e-04 0.779 0.436239
## stat21 -1.963e-04 2.054e-04 -0.956 0.339269
## stat22 -3.181e-04 2.048e-04 -1.553 0.120506
## stat23 4.623e-04 2.046e-04 2.259 0.023911 *
## stat24 -5.004e-04 2.064e-04 -2.424 0.015374 *
## stat25 -3.786e-04 2.040e-04 -1.856 0.063458 .
## stat26 -3.661e-04 2.048e-04 -1.788 0.073911 .
## stat27 2.035e-04 2.044e-04 0.996 0.319532
## stat28 -4.105e-05 2.045e-04 -0.201 0.840897
## stat29 3.222e-04 2.063e-04 1.562 0.118341
## stat30 2.311e-04 2.069e-04 1.117 0.264141
## stat31 1.102e-04 2.065e-04 0.534 0.593483
## stat32 -7.713e-06 2.065e-04 -0.037 0.970203
## stat33 -3.347e-04 2.042e-04 -1.639 0.101201
## stat34 2.860e-04 2.052e-04 1.394 0.163456
## stat35 -2.654e-04 2.055e-04 -1.291 0.196664
## stat36 4.764e-05 2.031e-04 0.235 0.814537
## stat37 -5.217e-05 2.058e-04 -0.254 0.799879
## stat38 2.283e-04 2.054e-04 1.112 0.266371
## stat39 -2.326e-04 2.030e-04 -1.146 0.251821
## stat40 1.871e-04 2.043e-04 0.916 0.359863
## stat41 -4.275e-04 2.028e-04 -2.108 0.035118 *
## stat42 2.533e-04 2.039e-04 1.242 0.214211
## stat43 -3.345e-04 2.043e-04 -1.637 0.101623
## stat44 1.873e-04 2.056e-04 0.911 0.362389
## stat45 3.781e-05 2.051e-04 0.184 0.853753
## stat46 3.805e-04 2.045e-04 1.861 0.062842 .
## stat47 2.096e-04 2.057e-04 1.019 0.308342
## stat48 3.035e-04 2.046e-04 1.483 0.138131
## stat49 6.491e-05 2.038e-04 0.319 0.750061
## stat50 3.134e-04 2.041e-04 1.535 0.124814
## stat51 4.548e-05 2.060e-04 0.221 0.825298
## stat52 1.376e-07 2.047e-04 0.001 0.999464
## stat53 -7.636e-05 2.061e-04 -0.371 0.710990
## stat54 -3.635e-04 2.077e-04 -1.750 0.080127 .
## stat55 1.442e-04 2.032e-04 0.709 0.478063
## stat56 2.929e-04 2.056e-04 1.425 0.154307
## stat57 9.230e-05 2.026e-04 0.456 0.648714
## stat58 -7.732e-05 2.035e-04 -0.380 0.704048
## stat59 2.015e-04 2.043e-04 0.986 0.323978
## stat60 6.398e-04 2.054e-04 3.116 0.001845 **
## stat61 -4.421e-04 2.056e-04 -2.150 0.031617 *
## stat62 -2.611e-04 2.046e-04 -1.276 0.201885
## stat63 5.829e-05 2.052e-04 0.284 0.776353
## stat64 2.394e-04 2.044e-04 1.171 0.241500
## stat65 -2.207e-04 2.064e-04 -1.069 0.284978
## stat66 1.747e-04 2.082e-04 0.839 0.401419
## stat67 1.610e-04 2.059e-04 0.782 0.434329
## stat68 -1.423e-06 2.047e-04 -0.007 0.994453
## stat69 -1.192e-04 2.046e-04 -0.583 0.560168
## stat70 1.605e-04 2.049e-04 0.783 0.433509
## stat71 2.712e-05 2.050e-04 0.132 0.894755
## stat72 1.781e-04 2.052e-04 0.868 0.385566
## stat73 3.713e-04 2.054e-04 1.808 0.070631 .
## stat74 -7.966e-05 2.049e-04 -0.389 0.697502
## stat75 3.440e-05 2.062e-04 0.167 0.867507
## stat76 1.330e-04 2.045e-04 0.650 0.515611
## stat77 2.264e-04 2.041e-04 1.109 0.267370
## stat78 -4.841e-04 2.054e-04 -2.357 0.018439 *
## stat79 6.126e-05 2.057e-04 0.298 0.765840
## stat80 -6.550e-05 2.056e-04 -0.319 0.750049
## stat81 -4.088e-05 2.065e-04 -0.198 0.843084
## stat82 1.639e-04 2.051e-04 0.799 0.424292
## stat83 -4.428e-04 2.055e-04 -2.155 0.031206 *
## stat84 -2.882e-04 2.041e-04 -1.412 0.157941
## stat85 -3.378e-04 2.059e-04 -1.641 0.100952
## stat86 1.547e-04 2.048e-04 0.755 0.450295
## stat87 -2.072e-04 2.057e-04 -1.007 0.313799
## stat88 1.661e-04 2.032e-04 0.817 0.413950
## stat89 8.700e-05 2.047e-04 0.425 0.670831
## stat90 -2.455e-04 2.045e-04 -1.201 0.229951
## stat91 -3.149e-04 2.037e-04 -1.546 0.122057
## stat92 -4.849e-04 2.054e-04 -2.361 0.018260 *
## stat93 -1.245e-04 2.086e-04 -0.597 0.550777
## stat94 3.791e-05 2.051e-04 0.185 0.853327
## stat95 5.988e-05 2.055e-04 0.291 0.770809
## stat96 -4.818e-04 2.041e-04 -2.360 0.018298 *
## stat97 2.990e-04 2.034e-04 1.470 0.141618
## stat98 3.625e-03 2.022e-04 17.928 < 2e-16 ***
## stat99 4.306e-04 2.069e-04 2.081 0.037502 *
## stat100 6.788e-04 2.058e-04 3.299 0.000978 ***
## stat101 -2.650e-04 2.073e-04 -1.278 0.201146
## stat102 1.553e-04 2.054e-04 0.756 0.449769
## stat103 -3.842e-04 2.063e-04 -1.863 0.062575 .
## stat104 -1.700e-04 2.039e-04 -0.834 0.404424
## stat105 2.073e-04 2.029e-04 1.022 0.306934
## stat106 -2.872e-04 2.055e-04 -1.397 0.162325
## stat107 -1.469e-04 2.025e-04 -0.725 0.468368
## stat108 -5.871e-05 2.048e-04 -0.287 0.774329
## stat109 -3.722e-04 2.045e-04 -1.820 0.068854 .
## stat110 -3.385e-03 2.029e-04 -16.682 < 2e-16 ***
## stat111 -1.191e-04 2.045e-04 -0.583 0.560253
## stat112 9.175e-05 2.057e-04 0.446 0.655645
## stat113 -2.499e-04 2.055e-04 -1.216 0.223938
## stat114 2.395e-04 2.045e-04 1.171 0.241579
## stat115 3.186e-04 2.052e-04 1.552 0.120624
## stat116 2.966e-04 2.048e-04 1.448 0.147615
## stat117 -8.793e-05 2.054e-04 -0.428 0.668658
## stat118 -1.121e-05 2.020e-04 -0.056 0.955733
## stat119 1.404e-04 2.036e-04 0.689 0.490562
## stat120 1.780e-05 2.032e-04 0.088 0.930202
## stat121 -6.201e-06 2.056e-04 -0.030 0.975941
## stat122 -1.454e-04 2.045e-04 -0.711 0.477177
## stat123 1.647e-04 2.080e-04 0.792 0.428442
## stat124 -1.480e-04 2.047e-04 -0.723 0.469716
## stat125 3.774e-05 2.047e-04 0.184 0.853721
## stat126 1.502e-04 2.042e-04 0.735 0.462169
## stat127 2.360e-04 2.041e-04 1.156 0.247626
## stat128 -5.200e-04 2.041e-04 -2.548 0.010856 *
## stat129 3.735e-04 2.036e-04 1.834 0.066669 .
## stat130 1.598e-04 2.056e-04 0.777 0.436996
## stat131 1.644e-04 2.045e-04 0.804 0.421432
## stat132 -4.304e-05 2.031e-04 -0.212 0.832172
## stat133 6.853e-05 2.038e-04 0.336 0.736731
## stat134 -8.923e-05 2.040e-04 -0.437 0.661898
## stat135 -1.601e-05 2.042e-04 -0.078 0.937534
## stat136 -5.440e-05 2.052e-04 -0.265 0.790972
## stat137 6.551e-05 2.040e-04 0.321 0.748133
## stat138 1.764e-04 2.038e-04 0.866 0.386648
## stat139 1.206e-04 2.067e-04 0.584 0.559556
## stat140 2.511e-04 2.035e-04 1.234 0.217278
## stat141 1.334e-04 2.032e-04 0.657 0.511472
## stat142 -1.735e-04 2.067e-04 -0.840 0.401173
## stat143 -1.753e-04 2.043e-04 -0.858 0.390871
## stat144 2.109e-04 2.042e-04 1.033 0.301860
## stat145 -5.852e-05 2.080e-04 -0.281 0.778439
## stat146 -9.620e-04 2.058e-04 -4.675 3.02e-06 ***
## stat147 -3.941e-04 2.063e-04 -1.910 0.056135 .
## stat148 -4.098e-04 2.034e-04 -2.015 0.043982 *
## stat149 -5.657e-04 2.066e-04 -2.739 0.006194 **
## stat150 -2.295e-04 2.058e-04 -1.115 0.264847
## stat151 2.556e-04 2.058e-04 1.242 0.214391
## stat152 -2.793e-04 2.026e-04 -1.378 0.168180
## stat153 1.067e-04 2.080e-04 0.513 0.608117
## stat154 2.531e-04 2.073e-04 1.221 0.222200
## stat155 4.028e-04 2.050e-04 1.965 0.049458 *
## stat156 4.113e-04 2.054e-04 2.002 0.045331 *
## stat157 -1.189e-04 2.030e-04 -0.586 0.557862
## stat158 2.777e-04 2.060e-04 1.348 0.177789
## stat159 2.332e-05 2.042e-04 0.114 0.909103
## stat160 1.066e-04 2.061e-04 0.517 0.605134
## stat161 2.413e-05 2.055e-04 0.117 0.906504
## stat162 -1.212e-04 2.041e-04 -0.594 0.552496
## stat163 2.225e-04 2.072e-04 1.074 0.282881
## stat164 5.406e-05 2.079e-04 0.260 0.794844
## stat165 1.067e-04 2.024e-04 0.527 0.598027
## stat166 1.204e-04 2.019e-04 0.597 0.550819
## stat167 -1.307e-04 2.050e-04 -0.638 0.523817
## stat168 -2.572e-04 2.032e-04 -1.266 0.205654
## stat169 -3.618e-05 2.051e-04 -0.176 0.859936
## stat170 3.935e-05 2.054e-04 0.192 0.848078
## stat171 -3.488e-05 2.062e-04 -0.169 0.865651
## stat172 6.266e-04 2.029e-04 3.089 0.002019 **
## stat173 1.352e-04 2.054e-04 0.658 0.510507
## stat174 2.411e-04 2.050e-04 1.176 0.239609
## stat175 -2.703e-04 2.063e-04 -1.310 0.190222
## stat176 -1.903e-04 2.054e-04 -0.926 0.354270
## stat177 -4.077e-04 2.054e-04 -1.985 0.047183 *
## stat178 -9.255e-06 2.085e-04 -0.044 0.964596
## stat179 -4.820e-05 2.038e-04 -0.236 0.813097
## stat180 -1.586e-04 2.035e-04 -0.779 0.435778
## stat181 3.599e-04 2.060e-04 1.747 0.080629 .
## stat182 4.208e-04 2.076e-04 2.027 0.042747 *
## stat183 1.673e-04 2.029e-04 0.824 0.409724
## stat184 2.457e-04 2.051e-04 1.198 0.231005
## stat185 4.648e-05 2.027e-04 0.229 0.818610
## stat186 1.194e-04 2.063e-04 0.579 0.562844
## stat187 -2.259e-04 2.035e-04 -1.110 0.266846
## stat188 -4.795e-06 2.037e-04 -0.024 0.981220
## stat189 2.120e-05 2.056e-04 0.103 0.917869
## stat190 1.154e-04 2.041e-04 0.566 0.571736
## stat191 -2.161e-04 2.038e-04 -1.060 0.289178
## stat192 -1.255e-05 2.067e-04 -0.061 0.951563
## stat193 2.626e-04 2.077e-04 1.264 0.206210
## stat194 1.943e-06 2.043e-04 0.010 0.992413
## stat195 8.818e-05 2.056e-04 0.429 0.667997
## stat196 -1.649e-04 2.076e-04 -0.795 0.426918
## stat197 -2.453e-04 2.021e-04 -1.214 0.224808
## stat198 -1.790e-04 2.058e-04 -0.870 0.384479
## stat199 2.439e-04 2.032e-04 1.200 0.230220
## stat200 -2.016e-04 2.033e-04 -0.992 0.321360
## stat201 1.959e-04 2.043e-04 0.959 0.337535
## stat202 -3.265e-04 2.074e-04 -1.574 0.115472
## stat203 1.647e-04 2.040e-04 0.807 0.419565
## stat204 -5.445e-05 2.029e-04 -0.268 0.788489
## stat205 2.141e-04 2.033e-04 1.053 0.292164
## stat206 -1.487e-04 2.068e-04 -0.719 0.472283
## stat207 3.776e-04 2.034e-04 1.856 0.063478 .
## stat208 4.002e-05 2.053e-04 0.195 0.845481
## stat209 -7.536e-06 2.038e-04 -0.037 0.970508
## stat210 -2.480e-04 2.045e-04 -1.213 0.225305
## stat211 -2.796e-05 2.043e-04 -0.137 0.891175
## stat212 1.317e-04 2.035e-04 0.647 0.517588
## stat213 3.248e-05 2.050e-04 0.158 0.874086
## stat214 -1.896e-04 2.056e-04 -0.922 0.356475
## stat215 -2.584e-04 2.060e-04 -1.254 0.209832
## stat216 -1.863e-04 2.046e-04 -0.911 0.362536
## stat217 1.544e-04 2.052e-04 0.752 0.451934
## x18.sqrt 2.662e-02 7.742e-04 34.388 < 2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.02523 on 5059 degrees of freedom
## Multiple R-squared: 0.3871, Adjusted R-squared: 0.358
## F-statistic: 13.31 on 240 and 5059 DF, p-value: < 2.2e-16
# Re-run Cook's distance diagnostics after removing high-leverage points
cd.full2 <- plot.diagnostics(model.full2, data.train2)
## [1] "Number of data points that have Cook's D > 4/n: 291"
## [1] "Number of data points that have Cook's D > 1: 0"
# much more normal residuals than before.
# Checking to see if distributions are different and if so which variables
# High Leverage Plot
# Compare the target variable's distribution between high-leverage
# (Cook's D > 4/n) observations and the remaining "normal" observations.
plotData <- data.train %>%
  rownames_to_column() %>%
  mutate(type = ifelse(rowname %in% high.cd, 'High', 'Normal')) %>%
  dplyr::select(type, target = one_of(label.names))
# NOTE(review): `label` relied on partial argument matching; the real
# scale_y_continuous argument is `labels`, spelled out here.
ggplot(data = plotData, aes(x = type, y = target)) +
  geom_boxplot(fill = 'light blue', outlier.shape = NA) +
  scale_y_continuous(name = "Target Variable Values",
                     labels = scales::comma_format(accuracy = .1)) +
  theme_light() +
  ggtitle('Distribution of High Leverage Points and Normal Points')
# Tag each training row as High (flagged by Cook's D) or Normal, keeping
# only the feature columns for the group comparison below.
plotData <- data.train %>%
  rownames_to_column() %>%
  mutate(type = ifelse(rowname %in% high.cd, 'High', 'Normal')) %>%
  dplyr::select(type, one_of(feature.names))
# 2 sample t-tests: each feature compared across the two leverage groups
comp.test <- lapply(
  dplyr::select(plotData, one_of(feature.names)),
  function(x) t.test(x ~ plotData$type, var.equal = TRUE)
)
sig.comp <- list.filter(comp.test, p.value < 0.05)
sapply(sig.comp, function(x) x[['p.value']])
## x4 x5 x16 stat15 stat27 stat38 stat98 stat110 stat128
## 7.538299e-03 3.428941e-02 4.942924e-02 3.782893e-02 3.005070e-02 2.614385e-02 1.238531e-06 4.763874e-03 6.922363e-03
## stat146 stat151 stat186 stat193 x18.sqrt
## 1.415771e-02 3.146729e-02 3.675378e-02 3.756332e-02 1.462902e-02
# Distribution (box) Plots
mm = melt(plotData, id=c('type'))
ggplot(mm) +
geom_boxplot(aes(x=type, y=value))+
facet_wrap(~variable, ncol=8, scales = 'free_y') +
scale_y_continuous(name="values",label=scales::comma_format(accuracy=.1)) +
ggtitle('Distribution of High Leverage Points and Normal Points')
# Intercept-only (grand mean) baseline model for later comparison
model.null <- lm(grand.mean.formula, data = data.train)
summary(model.null)
##
## Call:
## lm(formula = grand.mean.formula, data = data.train)
##
## Residuals:
## Min 1Q Median 3Q Max
## -0.114816 -0.023952 -0.003362 0.020696 0.190497
##
## Coefficients:
## Estimate Std. Error t value Pr(>|t|)
## (Intercept) 2.0966918 0.0004858 4316 <2e-16 ***
## ---
## Signif. codes: 0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
##
## Residual standard error: 0.0363 on 5583 degrees of freedom
References — Basic: http://www.stat.columbia.edu/~martin/W2024/R10.pdf ; Cross Validation + Other Metrics: http://www.sthda.com/english/articles/37-model-selection-essentials-in-r/154-stepwise-regression-essentials-in-r/
# Forward stepwise selection via caret (leapForward), gated by the
# algo.forward.caret parameter. Seed fixed for reproducible CV folds.
# (Idiom fix: `== TRUE` on a logical flag is redundant; `=` -> `<-`.)
if (algo.forward.caret) {
  set.seed(1)
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "leapForward",
    feature.names = feature.names
  )
  model.forward <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 15 on full training set
## [1] "All models results"
## nvmax RMSE Rsquared MAE RMSESD RsquaredSD MAESD
## 1 1 0.03426021 0.1102603 0.02678506 0.0007528041 0.02129231 0.0004213658
## 2 2 0.03341255 0.1548136 0.02608465 0.0009549896 0.03477939 0.0005441361
## 3 3 0.03278951 0.1857579 0.02547103 0.0009056890 0.03288428 0.0004986653
## 4 4 0.03227713 0.2113517 0.02470874 0.0008645806 0.03734399 0.0005939377
## 5 5 0.03198524 0.2249611 0.02451264 0.0009072163 0.03436803 0.0005425191
## 6 6 0.03196756 0.2257863 0.02450055 0.0009147022 0.03323145 0.0005522053
## 7 7 0.03186275 0.2306455 0.02443907 0.0008841335 0.03173780 0.0005201270
## 8 8 0.03180485 0.2333632 0.02440347 0.0008909229 0.03190063 0.0004926743
## 9 9 0.03174677 0.2360753 0.02438467 0.0009075872 0.03063354 0.0004965026
## 10 10 0.03168841 0.2387674 0.02432430 0.0008999805 0.02990088 0.0004865291
## 11 11 0.03169907 0.2381655 0.02434296 0.0008523372 0.02756473 0.0004382474
## 12 12 0.03169542 0.2382564 0.02435400 0.0008401842 0.02585450 0.0004245230
## 13 13 0.03167366 0.2392646 0.02432044 0.0008373043 0.02565854 0.0004341691
## 14 14 0.03166420 0.2397724 0.02430440 0.0008508276 0.02621653 0.0004402137
## 15 15 0.03163949 0.2409469 0.02429082 0.0008593315 0.02618453 0.0004442920
## 16 16 0.03165164 0.2403430 0.02429781 0.0008321322 0.02526408 0.0004478658
## 17 17 0.03165851 0.2400597 0.02429486 0.0008510357 0.02571485 0.0004432393
## 18 18 0.03163951 0.2409482 0.02428901 0.0008616926 0.02483458 0.0004450653
## 19 19 0.03167827 0.2390992 0.02432694 0.0008345569 0.02370360 0.0004271269
## 20 20 0.03170627 0.2377880 0.02434710 0.0008290882 0.02341048 0.0004223665
## 21 21 0.03173214 0.2366471 0.02436755 0.0008324223 0.02405134 0.0004327652
## 22 22 0.03176505 0.2350680 0.02439784 0.0008294861 0.02361180 0.0004233768
## 23 23 0.03176223 0.2352378 0.02440165 0.0008317759 0.02380651 0.0004236607
## 24 24 0.03176725 0.2350697 0.02441149 0.0008418167 0.02441508 0.0004374781
## 25 25 0.03178229 0.2344164 0.02442767 0.0008357834 0.02492069 0.0004356016
## 26 26 0.03180622 0.2333191 0.02444664 0.0008366503 0.02485684 0.0004381811
## 27 27 0.03181312 0.2330129 0.02444572 0.0008290087 0.02446424 0.0004249594
## 28 28 0.03182294 0.2326216 0.02443501 0.0008141599 0.02425661 0.0004183447
## 29 29 0.03183304 0.2321204 0.02444154 0.0008082972 0.02396822 0.0004176705
## 30 30 0.03184978 0.2313617 0.02444502 0.0008022351 0.02378389 0.0004065443
## 31 31 0.03185901 0.2309734 0.02444885 0.0007929067 0.02287079 0.0004023088
## 32 32 0.03188304 0.2299186 0.02447488 0.0007895438 0.02342731 0.0004049664
## 33 33 0.03189546 0.2294032 0.02447748 0.0007853423 0.02448265 0.0004046648
## 34 34 0.03190049 0.2291563 0.02447437 0.0007785900 0.02410299 0.0003940673
## 35 35 0.03190137 0.2291546 0.02447921 0.0007786900 0.02397655 0.0003994480
## 36 36 0.03190426 0.2290876 0.02448473 0.0007744877 0.02391424 0.0004010935
## 37 37 0.03190988 0.2289050 0.02447669 0.0007893986 0.02472139 0.0004088587
## 38 38 0.03192742 0.2281280 0.02450082 0.0007844912 0.02473346 0.0004118077
## 39 39 0.03194180 0.2274844 0.02451534 0.0007977205 0.02491037 0.0004218552
## 40 40 0.03195152 0.2270923 0.02452470 0.0008039877 0.02501946 0.0004218392
## 41 41 0.03196520 0.2265038 0.02453214 0.0007948824 0.02484698 0.0004176748
## 42 42 0.03197139 0.2262985 0.02453694 0.0007822986 0.02513960 0.0004144641
## 43 43 0.03198537 0.2256844 0.02454857 0.0007816034 0.02609210 0.0004115544
## 44 44 0.03198423 0.2257095 0.02454886 0.0007914110 0.02553364 0.0004105515
## 45 45 0.03199474 0.2252278 0.02455186 0.0008042751 0.02509173 0.0004165877
## 46 46 0.03200107 0.2249620 0.02455834 0.0008114790 0.02498185 0.0004118621
## 47 47 0.03201505 0.2243482 0.02456905 0.0008170497 0.02417158 0.0004038639
## 48 48 0.03203447 0.2234964 0.02458677 0.0008207037 0.02398047 0.0004118974
## 49 49 0.03204966 0.2227990 0.02459871 0.0008310866 0.02385566 0.0004158852
## 50 50 0.03205584 0.2225217 0.02460689 0.0008276291 0.02425146 0.0004147309
## 51 51 0.03205271 0.2227071 0.02460448 0.0008251882 0.02430544 0.0004031177
## 52 52 0.03205136 0.2228096 0.02460017 0.0008285602 0.02422718 0.0004061043
## 53 53 0.03205729 0.2225258 0.02461368 0.0008369043 0.02348778 0.0003998033
## 54 54 0.03206538 0.2221869 0.02461197 0.0008371078 0.02335276 0.0004055748
## 55 55 0.03207595 0.2217341 0.02461556 0.0008420362 0.02323943 0.0004042657
## 56 56 0.03208132 0.2215685 0.02461177 0.0008305296 0.02288161 0.0004114346
## 57 57 0.03209571 0.2210120 0.02462357 0.0008424689 0.02348665 0.0004368620
## 58 58 0.03210381 0.2206744 0.02463088 0.0008365657 0.02311960 0.0004358985
## 59 59 0.03210164 0.2207993 0.02463965 0.0008315634 0.02308960 0.0004321107
## 60 60 0.03210488 0.2207022 0.02464016 0.0008541732 0.02335477 0.0004559694
## 61 61 0.03210438 0.2207147 0.02463955 0.0008431746 0.02325433 0.0004470284
## 62 62 0.03210947 0.2205243 0.02464388 0.0008414266 0.02302778 0.0004388893
## 63 63 0.03211761 0.2201934 0.02464402 0.0008309068 0.02274551 0.0004248323
## 64 64 0.03212435 0.2199122 0.02464921 0.0008185194 0.02237781 0.0004231796
## 65 65 0.03212728 0.2197865 0.02464404 0.0008220543 0.02252348 0.0004247080
## 66 66 0.03213500 0.2194779 0.02464902 0.0008153190 0.02222005 0.0004030686
## 67 67 0.03213644 0.2194295 0.02464785 0.0008205327 0.02215100 0.0003984378
## 68 68 0.03213748 0.2194193 0.02465613 0.0008242981 0.02228278 0.0004012508
## 69 69 0.03213897 0.2193680 0.02465300 0.0008271265 0.02249565 0.0003992050
## 70 70 0.03214416 0.2191092 0.02465312 0.0008198294 0.02177535 0.0003974562
## 71 71 0.03214006 0.2193066 0.02464383 0.0008278115 0.02207390 0.0004018038
## 72 72 0.03215592 0.2186462 0.02465430 0.0008329419 0.02274761 0.0004085917
## 73 73 0.03216425 0.2183056 0.02465620 0.0008234252 0.02230127 0.0004086639
## 74 74 0.03217120 0.2180078 0.02465456 0.0008355316 0.02192199 0.0004126484
## 75 75 0.03216813 0.2182071 0.02464774 0.0008332710 0.02231807 0.0004173926
## 76 76 0.03217292 0.2180486 0.02465192 0.0008305880 0.02249957 0.0004152097
## 77 77 0.03217278 0.2180923 0.02465874 0.0008292777 0.02273119 0.0004235582
## 78 78 0.03217566 0.2179697 0.02466583 0.0008192179 0.02242350 0.0004253432
## 79 79 0.03219317 0.2172244 0.02467342 0.0008252301 0.02282468 0.0004416766
## 80 80 0.03220749 0.2166414 0.02469160 0.0008181437 0.02265092 0.0004327592
## 81 81 0.03219981 0.2169745 0.02468526 0.0008030665 0.02150155 0.0004127828
## 82 82 0.03219604 0.2171576 0.02467470 0.0007990609 0.02105661 0.0004182189
## 83 83 0.03220285 0.2168978 0.02467794 0.0008034094 0.02131610 0.0004220929
## 84 84 0.03221466 0.2163894 0.02469050 0.0007934029 0.02074806 0.0004122794
## 85 85 0.03220340 0.2168759 0.02468516 0.0007879490 0.02098153 0.0004140607
## 86 86 0.03220948 0.2166152 0.02469995 0.0007892052 0.02120931 0.0004030460
## 87 87 0.03221528 0.2163682 0.02470570 0.0008021915 0.02115657 0.0004113510
## 88 88 0.03222237 0.2160910 0.02471277 0.0008008791 0.02113310 0.0004155982
## 89 89 0.03222398 0.2159829 0.02470692 0.0007999480 0.02086363 0.0004093909
## 90 90 0.03221903 0.2162232 0.02469708 0.0008027121 0.02118674 0.0004170575
## 91 91 0.03222070 0.2161654 0.02469753 0.0008109831 0.02132853 0.0004174175
## 92 92 0.03221782 0.2163085 0.02469331 0.0008159746 0.02128971 0.0004132119
## 93 93 0.03221428 0.2164828 0.02468448 0.0008177147 0.02132864 0.0004152499
## 94 94 0.03221665 0.2163772 0.02469143 0.0008169074 0.02108369 0.0004156402
## 95 95 0.03222475 0.2160430 0.02470089 0.0008209146 0.02097618 0.0004159653
## 96 96 0.03222425 0.2160981 0.02469438 0.0008189753 0.02089167 0.0004144920
## 97 97 0.03222970 0.2158996 0.02470455 0.0008169783 0.02117029 0.0004067262
## 98 98 0.03223003 0.2159077 0.02470973 0.0008166621 0.02142947 0.0004062372
## 99 99 0.03222301 0.2162532 0.02470350 0.0008150071 0.02129735 0.0004006624
## 100 100 0.03223571 0.2157114 0.02471248 0.0008137542 0.02077248 0.0003997836
## 101 101 0.03224534 0.2153242 0.02472065 0.0008091270 0.02086575 0.0003976216
## 102 102 0.03224946 0.2151681 0.02472956 0.0008136732 0.02117533 0.0004047948
## 103 103 0.03224141 0.2155583 0.02471891 0.0008018721 0.02122691 0.0004108949
## 104 104 0.03224212 0.2154790 0.02472284 0.0007962840 0.02099192 0.0004110076
## 105 105 0.03224151 0.2154858 0.02471641 0.0007941398 0.02069080 0.0004136345
## 106 106 0.03224538 0.2153487 0.02471573 0.0007923730 0.02116361 0.0004120177
## 107 107 0.03224810 0.2152266 0.02471626 0.0007838558 0.02086815 0.0004052025
## 108 108 0.03225143 0.2151277 0.02471540 0.0007888712 0.02104432 0.0004138911
## 109 109 0.03225467 0.2149900 0.02471845 0.0007944018 0.02152057 0.0004191947
## 110 110 0.03226019 0.2147780 0.02472409 0.0007967409 0.02145995 0.0004178646
## 111 111 0.03226490 0.2146157 0.02472572 0.0007917044 0.02135171 0.0004251457
## 112 112 0.03226270 0.2147112 0.02472121 0.0007860524 0.02073993 0.0004166495
## 113 113 0.03226559 0.2146022 0.02471867 0.0007916445 0.02068466 0.0004158401
## 114 114 0.03227292 0.2142725 0.02472333 0.0007912540 0.02031648 0.0004084945
## 115 115 0.03227770 0.2140817 0.02472167 0.0007901162 0.02020176 0.0004028207
## 116 116 0.03227340 0.2143063 0.02471510 0.0007937478 0.02055446 0.0004051135
## 117 117 0.03226981 0.2144630 0.02471489 0.0007986030 0.02080442 0.0004072394
## 118 118 0.03226950 0.2144552 0.02471184 0.0007934451 0.02009658 0.0004045565
## 119 119 0.03226662 0.2145847 0.02470615 0.0007892065 0.02000647 0.0003965637
## 120 120 0.03226843 0.2144857 0.02470733 0.0007889785 0.01971223 0.0003900070
## 121 121 0.03228178 0.2139481 0.02472032 0.0007896108 0.02015600 0.0003859801
## 122 122 0.03227641 0.2141974 0.02471753 0.0007848716 0.01999103 0.0003869209
## 123 123 0.03227859 0.2141153 0.02472194 0.0007824965 0.01996990 0.0003832333
## 124 124 0.03227739 0.2141801 0.02471985 0.0007865484 0.02015117 0.0003853512
## 125 125 0.03226988 0.2145171 0.02471646 0.0007868318 0.02021949 0.0003832078
## 126 126 0.03227981 0.2141037 0.02472450 0.0007839754 0.01974734 0.0003799442
## 127 127 0.03227668 0.2142629 0.02472476 0.0007770422 0.01977257 0.0003715059
## 128 128 0.03228127 0.2141043 0.02473012 0.0007740786 0.01988308 0.0003699428
## 129 129 0.03228257 0.2140426 0.02473057 0.0007743198 0.02005856 0.0003723835
## 130 130 0.03228115 0.2140942 0.02473109 0.0007746068 0.02017804 0.0003729185
## 131 131 0.03228343 0.2139905 0.02473374 0.0007739370 0.01986336 0.0003749385
## 132 132 0.03228962 0.2137657 0.02473694 0.0007775926 0.01990013 0.0003827151
## 133 133 0.03229008 0.2137700 0.02473461 0.0007818234 0.02031708 0.0003845392
## 134 134 0.03228840 0.2138792 0.02473473 0.0007855359 0.02042269 0.0003891061
## 135 135 0.03229462 0.2136247 0.02474026 0.0007821553 0.02059436 0.0003874865
## 136 136 0.03229480 0.2136320 0.02474016 0.0007912122 0.02085632 0.0003953227
## 137 137 0.03229156 0.2137780 0.02473756 0.0007902473 0.02072796 0.0003990039
## 138 138 0.03228708 0.2139802 0.02473099 0.0007891008 0.02069433 0.0003982005
## 139 139 0.03228763 0.2139680 0.02472786 0.0007948860 0.02079682 0.0004017013
## 140 140 0.03228882 0.2139286 0.02473014 0.0007886465 0.02077376 0.0003930317
## 141 141 0.03229700 0.2135550 0.02473824 0.0007780274 0.02040044 0.0003899584
## 142 142 0.03230359 0.2133037 0.02473868 0.0007798746 0.02059502 0.0003919698
## 143 143 0.03230772 0.2130888 0.02474136 0.0007816932 0.02059172 0.0003872797
## 144 144 0.03230725 0.2130802 0.02474120 0.0007813207 0.02023630 0.0003876276
## 145 145 0.03231029 0.2129471 0.02474116 0.0007795320 0.02001875 0.0003939202
## 146 146 0.03231147 0.2129093 0.02474407 0.0007834038 0.01984737 0.0003921626
## 147 147 0.03231354 0.2128429 0.02474667 0.0007784123 0.01993042 0.0003950415
## 148 148 0.03231701 0.2127089 0.02474938 0.0007749306 0.01985020 0.0003921134
## 149 149 0.03232053 0.2125630 0.02475411 0.0007718220 0.01996978 0.0003854299
## 150 150 0.03232307 0.2124405 0.02475608 0.0007766287 0.02006867 0.0003833535
## 151 151 0.03232597 0.2123260 0.02475721 0.0007730687 0.01982289 0.0003830935
## 152 152 0.03232718 0.2122807 0.02475863 0.0007761854 0.01971293 0.0003810422
## 153 153 0.03232480 0.2123958 0.02475678 0.0007691622 0.01948101 0.0003723604
## 154 154 0.03232780 0.2122667 0.02476071 0.0007741230 0.01960951 0.0003773174
## 155 155 0.03232180 0.2125497 0.02475543 0.0007768100 0.01972016 0.0003774371
## 156 156 0.03232211 0.2125409 0.02475264 0.0007745373 0.01948584 0.0003768117
## 157 157 0.03232019 0.2126225 0.02474860 0.0007806056 0.01963101 0.0003812292
## 158 158 0.03231722 0.2127569 0.02474628 0.0007830220 0.01967356 0.0003825805
## 159 159 0.03231642 0.2127991 0.02474950 0.0007903182 0.01989897 0.0003863968
## 160 160 0.03231809 0.2127176 0.02475053 0.0007876485 0.01983771 0.0003813409
## 161 161 0.03231450 0.2128905 0.02474781 0.0007906014 0.01995722 0.0003865417
## 162 162 0.03231628 0.2128217 0.02475047 0.0007870514 0.01984203 0.0003835790
## 163 163 0.03231652 0.2128236 0.02475212 0.0007864822 0.01981717 0.0003817055
## 164 164 0.03232279 0.2125735 0.02475540 0.0007834748 0.01951396 0.0003782665
## 165 165 0.03232360 0.2125520 0.02475839 0.0007826981 0.01961689 0.0003753845
## 166 166 0.03232495 0.2124910 0.02475766 0.0007825144 0.01948164 0.0003763930
## 167 167 0.03232805 0.2123906 0.02475772 0.0007798184 0.01947377 0.0003725254
## 168 168 0.03232898 0.2123497 0.02476000 0.0007792919 0.01951105 0.0003710904
## 169 169 0.03233230 0.2122082 0.02476224 0.0007778043 0.01934343 0.0003695135
## 170 170 0.03233765 0.2119926 0.02476557 0.0007787126 0.01923285 0.0003724189
## 171 171 0.03233583 0.2120485 0.02476586 0.0007809336 0.01928566 0.0003743559
## 172 172 0.03233719 0.2120029 0.02476697 0.0007792179 0.01917393 0.0003744626
## 173 173 0.03234206 0.2118032 0.02477171 0.0007782846 0.01929067 0.0003736492
## 174 174 0.03233861 0.2119442 0.02477264 0.0007748663 0.01927503 0.0003740469
## 175 175 0.03233576 0.2120447 0.02476981 0.0007765825 0.01941060 0.0003767533
## 176 176 0.03234024 0.2118753 0.02477191 0.0007803193 0.01946965 0.0003768825
## 177 177 0.03234061 0.2118605 0.02477242 0.0007799150 0.01943705 0.0003794650
## 178 178 0.03233847 0.2119516 0.02476794 0.0007766783 0.01936465 0.0003814108
## 179 179 0.03234269 0.2117554 0.02476940 0.0007746035 0.01927426 0.0003784234
## 180 180 0.03234021 0.2118519 0.02476530 0.0007755190 0.01935958 0.0003790306
## 181 181 0.03233873 0.2119263 0.02476331 0.0007752000 0.01933310 0.0003778620
## 182 182 0.03233616 0.2120391 0.02476032 0.0007757483 0.01938528 0.0003769974
## 183 183 0.03233569 0.2120546 0.02475855 0.0007783481 0.01943092 0.0003792255
## 184 184 0.03233753 0.2119835 0.02476113 0.0007797579 0.01957251 0.0003800645
## 185 185 0.03233975 0.2119005 0.02476122 0.0007836676 0.01963304 0.0003828298
## 186 186 0.03233793 0.2119818 0.02475900 0.0007854037 0.01967325 0.0003878073
## 187 187 0.03233988 0.2119070 0.02476301 0.0007853405 0.01971118 0.0003898169
## 188 188 0.03233826 0.2119864 0.02476167 0.0007822318 0.01971645 0.0003878753
## 189 189 0.03233869 0.2119851 0.02476208 0.0007790495 0.01979526 0.0003886246
## 190 190 0.03233913 0.2119830 0.02476145 0.0007769012 0.01981784 0.0003870297
## 191 191 0.03233939 0.2119837 0.02476349 0.0007749667 0.01980069 0.0003842887
## 192 192 0.03233969 0.2119785 0.02476486 0.0007771274 0.01973268 0.0003838299
## 193 193 0.03233626 0.2121277 0.02476251 0.0007785295 0.01980909 0.0003830601
## 194 194 0.03233725 0.2120924 0.02476442 0.0007792637 0.01979151 0.0003821241
## 195 195 0.03233693 0.2121129 0.02476304 0.0007779241 0.01968087 0.0003798517
## 196 196 0.03233752 0.2120947 0.02476334 0.0007777482 0.01970085 0.0003795434
## 197 197 0.03233816 0.2120681 0.02476376 0.0007779097 0.01972879 0.0003815971
## 198 198 0.03233695 0.2121201 0.02476186 0.0007773279 0.01969945 0.0003801957
## 199 199 0.03233941 0.2120270 0.02476304 0.0007784648 0.01983056 0.0003800079
## 200 200 0.03233743 0.2121136 0.02476277 0.0007793964 0.01989832 0.0003809556
## 201 201 0.03233789 0.2120856 0.02476214 0.0007772234 0.01979211 0.0003799004
## 202 202 0.03233544 0.2121909 0.02476084 0.0007756651 0.01976029 0.0003788044
## 203 203 0.03233682 0.2121326 0.02476070 0.0007752283 0.01967479 0.0003780675
## 204 204 0.03233694 0.2121254 0.02476099 0.0007769708 0.01964799 0.0003774508
## 205 205 0.03233585 0.2121728 0.02476185 0.0007776237 0.01975332 0.0003778136
## 206 206 0.03233782 0.2120827 0.02476325 0.0007765189 0.01964787 0.0003767862
## 207 207 0.03233816 0.2120633 0.02476386 0.0007750662 0.01958734 0.0003757045
## 208 208 0.03233899 0.2120310 0.02476360 0.0007752742 0.01962379 0.0003755696
## 209 209 0.03233921 0.2120226 0.02476448 0.0007743627 0.01956316 0.0003747725
## 210 210 0.03233793 0.2120789 0.02476356 0.0007740847 0.01955857 0.0003742106
## 211 211 0.03233886 0.2120399 0.02476434 0.0007736823 0.01958487 0.0003722520
## 212 212 0.03234070 0.2119695 0.02476533 0.0007736860 0.01968851 0.0003711108
## 213 213 0.03234147 0.2119384 0.02476545 0.0007746539 0.01970153 0.0003719989
## 214 214 0.03234261 0.2118922 0.02476702 0.0007742604 0.01967221 0.0003727171
## 215 215 0.03234237 0.2118987 0.02476831 0.0007740693 0.01962663 0.0003724871
## 216 216 0.03234121 0.2119489 0.02476638 0.0007735872 0.01965715 0.0003699556
## 217 217 0.03234040 0.2119927 0.02476534 0.0007723776 0.01970442 0.0003680896
## 218 218 0.03233957 0.2120351 0.02476414 0.0007726798 0.01966459 0.0003670780
## 219 219 0.03233937 0.2120419 0.02476435 0.0007730628 0.01964171 0.0003665800
## 220 220 0.03233985 0.2120194 0.02476507 0.0007732996 0.01961400 0.0003665386
## 221 221 0.03233946 0.2120365 0.02476470 0.0007737284 0.01960376 0.0003663927
## 222 222 0.03233955 0.2120339 0.02476491 0.0007752211 0.01962662 0.0003671327
## 223 223 0.03233933 0.2120434 0.02476507 0.0007744084 0.01962756 0.0003666365
## 224 224 0.03233936 0.2120446 0.02476467 0.0007740312 0.01963260 0.0003650174
## 225 225 0.03233932 0.2120462 0.02476512 0.0007739615 0.01962856 0.0003656598
## 226 226 0.03233936 0.2120489 0.02476585 0.0007749216 0.01966310 0.0003665493
## 227 227 0.03233904 0.2120664 0.02476611 0.0007752368 0.01966431 0.0003672464
## 228 228 0.03233879 0.2120782 0.02476617 0.0007748036 0.01964511 0.0003673891
## 229 229 0.03233760 0.2121265 0.02476520 0.0007753045 0.01963838 0.0003685072
## 230 230 0.03233848 0.2120872 0.02476632 0.0007747648 0.01960698 0.0003689050
## 231 231 0.03233849 0.2120847 0.02476654 0.0007744438 0.01961617 0.0003689440
## 232 232 0.03233907 0.2120604 0.02476699 0.0007746850 0.01961612 0.0003689958
## 233 233 0.03233896 0.2120633 0.02476704 0.0007747891 0.01960100 0.0003693248
## 234 234 0.03233894 0.2120640 0.02476691 0.0007748588 0.01958655 0.0003695165
## 235 235 0.03233926 0.2120501 0.02476706 0.0007746337 0.01958916 0.0003694871
## 236 236 0.03233938 0.2120450 0.02476702 0.0007749339 0.01958872 0.0003693825
## 237 237 0.03233941 0.2120437 0.02476691 0.0007748246 0.01958728 0.0003692516
## 238 238 0.03233954 0.2120388 0.02476691 0.0007749659 0.01958784 0.0003693548
## 239 239 0.03233954 0.2120394 0.02476685 0.0007750095 0.01958734 0.0003694222
## 240 240 0.03233956 0.2120384 0.02476680 0.0007749933 0.01958899 0.0003693663
## [1] "Best Model"
## nvmax
## 15 15
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## [1] "Coefficients of final model:"
## Estimate 2.5 % 97.5 %
## (Intercept) 1.963910e+00 1.947791e+00 1.980030e+00
## x4 -5.353377e-05 -7.102358e-05 -3.604396e-05
## x7 1.149425e-02 1.026902e-02 1.271949e-02
## x8 4.293628e-04 1.427311e-04 7.159945e-04
## x9 3.324791e-03 2.685469e-03 3.964112e-03
## x10 1.337977e-03 7.438113e-04 1.932143e-03
## x11 2.291277e+05 8.705589e+04 3.711996e+05
## x16 9.873956e-04 5.758899e-04 1.398901e-03
## x17 1.719254e-03 1.089382e-03 2.349127e-03
## x21 1.404906e-04 5.914971e-05 2.218316e-04
## stat14 -9.651107e-04 -1.438841e-03 -4.913807e-04
## stat98 3.672725e-03 3.201860e-03 4.143589e-03
## stat100 7.167091e-04 2.372289e-04 1.196189e-03
## stat110 -3.313066e-03 -3.787795e-03 -2.838337e-03
## stat146 -7.552975e-04 -1.235627e-03 -2.749676e-04
## x18.sqrt 2.640688e-02 2.459080e-02 2.822297e-02
# Evaluate the forward-selection (leapForward) model on the held-out test set,
# printing a prediction summary and the test MSE (echoed output follows).
# isTRUE() instead of `== TRUE`: it returns FALSE (rather than raising
# "argument is of length zero") when the flag is NULL, e.g. if the parameter
# was omitted from `params`.
# NOTE(review): `transformation = t` presumably passes the inverse transform
# (undoing the log10 of the label) defined earlier in the file — confirm `t`
# is that function and not base R's transpose t().
if (isTRUE(algo.forward.caret)) {
  test.model(model.forward, data.test,
             method = "leapForward", subopt = NULL,
             formula = formula, feature.names = feature.names,
             label.names = label.names,
             id = id,
             draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 2.032 2.085 2.097 2.097 2.110 2.149
## [1] "leapForward Test MSE: 0.000992395717031664"
# Fit a backward-elimination (leapBackward) subset-selection model with caret
# on the training data. The seed makes the cross-validation folds
# reproducible; `returned` carries both the fitted model and the run id that
# the later test.model() call uses.
# isTRUE() instead of `== TRUE`: it returns FALSE (rather than raising
# "argument is of length zero") when the flag is NULL, e.g. if the parameter
# was omitted from `params`.
if (isTRUE(algo.backward.caret)) {
  set.seed(1)  # reproducible CV fold assignment
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train,
                                    method = "leapBackward",
                                    feature.names = feature.names)
  model.backward <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 15 on full training set
## [1] "All models results"
## nvmax RMSE Rsquared MAE RMSESD RsquaredSD MAESD
## 1 1 0.03426021 0.1102603 0.02678506 0.0007528041 0.02129231 0.0004213658
## 2 2 0.03341255 0.1548136 0.02608465 0.0009549896 0.03477939 0.0005441361
## 3 3 0.03278951 0.1857579 0.02547103 0.0009056890 0.03288428 0.0004986653
## 4 4 0.03227713 0.2113517 0.02470874 0.0008645806 0.03734399 0.0005939377
## 5 5 0.03198524 0.2249611 0.02451264 0.0009072163 0.03436803 0.0005425191
## 6 6 0.03196756 0.2257863 0.02450055 0.0009147022 0.03323145 0.0005522053
## 7 7 0.03186275 0.2306455 0.02443907 0.0008841335 0.03173780 0.0005201270
## 8 8 0.03180485 0.2333632 0.02440347 0.0008909229 0.03190063 0.0004926743
## 9 9 0.03174677 0.2360753 0.02438467 0.0009075872 0.03063354 0.0004965026
## 10 10 0.03168841 0.2387674 0.02432430 0.0008999805 0.02990088 0.0004865291
## 11 11 0.03171484 0.2374598 0.02436301 0.0008726894 0.02848580 0.0004481966
## 12 12 0.03171113 0.2375547 0.02437469 0.0008600439 0.02677835 0.0004351152
## 13 13 0.03167366 0.2392646 0.02432044 0.0008373043 0.02565854 0.0004341691
## 14 14 0.03166420 0.2397724 0.02430440 0.0008508276 0.02621653 0.0004402137
## 15 15 0.03163949 0.2409469 0.02429082 0.0008593315 0.02618453 0.0004442920
## 16 16 0.03164473 0.2406580 0.02428619 0.0008393763 0.02519951 0.0004486484
## 17 17 0.03164325 0.2407726 0.02427935 0.0008673947 0.02562879 0.0004463349
## 18 18 0.03164729 0.2405747 0.02429372 0.0008485749 0.02464523 0.0004330501
## 19 19 0.03168841 0.2386146 0.02432800 0.0008264120 0.02355733 0.0004210316
## 20 20 0.03171110 0.2375644 0.02434183 0.0008271752 0.02314372 0.0004230877
## 21 21 0.03173476 0.2365096 0.02436339 0.0008313945 0.02379484 0.0004333460
## 22 22 0.03175516 0.2355267 0.02439250 0.0008338561 0.02314498 0.0004237346
## 23 23 0.03176916 0.2349201 0.02440245 0.0008327500 0.02340274 0.0004204588
## 24 24 0.03177493 0.2347050 0.02441004 0.0008409963 0.02345496 0.0004257067
## 25 25 0.03178287 0.2343685 0.02441062 0.0008308875 0.02322537 0.0004134362
## 26 26 0.03179619 0.2337741 0.02441438 0.0008324826 0.02359294 0.0004231466
## 27 27 0.03180761 0.2332725 0.02443488 0.0008366801 0.02383342 0.0004217597
## 28 28 0.03182861 0.2323548 0.02443680 0.0008191934 0.02423088 0.0004206251
## 29 29 0.03183561 0.2320413 0.02444476 0.0008105384 0.02410549 0.0004082149
## 30 30 0.03185024 0.2313653 0.02444863 0.0008014904 0.02384830 0.0003856504
## 31 31 0.03185283 0.2312528 0.02444723 0.0007969173 0.02329775 0.0003918088
## 32 32 0.03186868 0.2305862 0.02445835 0.0007905404 0.02396391 0.0003921945
## 33 33 0.03188780 0.2297477 0.02446460 0.0007865565 0.02494340 0.0003973655
## 34 34 0.03189182 0.2295659 0.02447159 0.0007800004 0.02445720 0.0003936162
## 35 35 0.03189635 0.2294232 0.02447373 0.0007769604 0.02419877 0.0003929271
## 36 36 0.03189436 0.2295445 0.02447257 0.0007825188 0.02422240 0.0004090803
## 37 37 0.03190115 0.2292838 0.02447025 0.0007962640 0.02501735 0.0004136543
## 38 38 0.03191729 0.2285613 0.02448650 0.0007798285 0.02474520 0.0004087816
## 39 39 0.03193203 0.2279545 0.02450272 0.0007911723 0.02523623 0.0004108918
## 40 40 0.03194654 0.2273048 0.02451757 0.0008070040 0.02495350 0.0004191922
## 41 41 0.03196409 0.2265445 0.02453187 0.0007934827 0.02489399 0.0004165233
## 42 42 0.03197647 0.2260333 0.02454319 0.0007800708 0.02450155 0.0003986890
## 43 43 0.03199466 0.2252299 0.02455340 0.0007760666 0.02488404 0.0003996308
## 44 44 0.03199553 0.2252195 0.02455370 0.0007800897 0.02426066 0.0003987086
## 45 45 0.03200684 0.2247222 0.02455942 0.0007866806 0.02413003 0.0004041298
## 46 46 0.03201926 0.2241588 0.02456604 0.0008058421 0.02408938 0.0004037689
## 47 47 0.03202895 0.2238202 0.02456513 0.0008116412 0.02421970 0.0004063187
## 48 48 0.03203295 0.2236481 0.02457481 0.0008234586 0.02467805 0.0004161632
## 49 49 0.03204358 0.2231107 0.02458470 0.0008305108 0.02444026 0.0004103283
## 50 50 0.03204396 0.2231292 0.02458094 0.0008357337 0.02463767 0.0004295078
## 51 51 0.03204944 0.2229014 0.02458762 0.0008345093 0.02471264 0.0004198666
## 52 52 0.03205649 0.2226194 0.02459771 0.0008372992 0.02479030 0.0004143381
## 53 53 0.03206633 0.2221715 0.02461110 0.0008425004 0.02416791 0.0004068308
## 54 54 0.03207086 0.2220109 0.02461044 0.0008455208 0.02406010 0.0004139518
## 55 55 0.03209045 0.2211443 0.02462760 0.0008490833 0.02360976 0.0004138940
## 56 56 0.03209355 0.2210285 0.02462680 0.0008406705 0.02330764 0.0004203771
## 57 57 0.03210284 0.2206265 0.02463840 0.0008396013 0.02309222 0.0004277604
## 58 58 0.03210678 0.2204901 0.02464182 0.0008462618 0.02325108 0.0004281123
## 59 59 0.03211087 0.2203377 0.02464786 0.0008337902 0.02314324 0.0004185129
## 60 60 0.03212955 0.2195623 0.02466962 0.0008401360 0.02382235 0.0004296303
## 61 61 0.03213795 0.2191938 0.02468202 0.0008398019 0.02377118 0.0004253069
## 62 62 0.03213857 0.2191469 0.02468521 0.0008385798 0.02330676 0.0004225637
## 63 63 0.03214133 0.2190488 0.02468236 0.0008326046 0.02322222 0.0004161557
## 64 64 0.03215795 0.2183417 0.02469250 0.0008267838 0.02311866 0.0004076196
## 65 65 0.03216192 0.2182107 0.02469294 0.0008255965 0.02299553 0.0004043456
## 66 66 0.03214799 0.2188766 0.02467444 0.0008242654 0.02325083 0.0004064395
## 67 67 0.03214659 0.2189562 0.02466740 0.0008265687 0.02256327 0.0004074907
## 68 68 0.03214538 0.2190359 0.02466483 0.0008332120 0.02217889 0.0004146178
## 69 69 0.03215152 0.2187662 0.02466459 0.0008288934 0.02217275 0.0004056444
## 70 70 0.03214592 0.2190486 0.02465228 0.0008323475 0.02234042 0.0004085425
## 71 71 0.03214800 0.2190132 0.02463969 0.0008270404 0.02216514 0.0004082876
## 72 72 0.03215669 0.2186437 0.02465100 0.0008331795 0.02256193 0.0004147454
## 73 73 0.03215367 0.2187952 0.02464946 0.0008283953 0.02266707 0.0004099737
## 74 74 0.03215676 0.2186839 0.02464656 0.0008280168 0.02276448 0.0004166444
## 75 75 0.03216800 0.2182089 0.02465942 0.0008251146 0.02284854 0.0004205164
## 76 76 0.03217114 0.2181262 0.02466287 0.0008170117 0.02249246 0.0004150882
## 77 77 0.03217437 0.2180012 0.02465861 0.0008186589 0.02252204 0.0004200170
## 78 78 0.03218105 0.2177573 0.02466331 0.0008076583 0.02224158 0.0004185362
## 79 79 0.03219504 0.2172269 0.02467302 0.0008213984 0.02258789 0.0004336141
## 80 80 0.03219841 0.2170796 0.02467804 0.0008251214 0.02289822 0.0004288977
## 81 81 0.03219585 0.2171968 0.02468240 0.0008186735 0.02215611 0.0004144227
## 82 82 0.03220395 0.2168671 0.02468516 0.0008168400 0.02230277 0.0004243540
## 83 83 0.03222304 0.2160394 0.02470337 0.0008181163 0.02203061 0.0004221431
## 84 84 0.03221695 0.2162883 0.02469343 0.0008160227 0.02214504 0.0004252315
## 85 85 0.03220496 0.2168054 0.02468591 0.0008129163 0.02198156 0.0004209713
## 86 86 0.03221559 0.2163283 0.02469734 0.0008149312 0.02199863 0.0004169186
## 87 87 0.03221269 0.2164886 0.02469454 0.0008224955 0.02220621 0.0004188990
## 88 88 0.03222062 0.2161359 0.02469525 0.0008238182 0.02199060 0.0004249613
## 89 89 0.03222288 0.2160442 0.02469390 0.0008185740 0.02211243 0.0004161867
## 90 90 0.03222214 0.2160936 0.02469855 0.0008199664 0.02190726 0.0004229266
## 91 91 0.03222757 0.2158600 0.02469933 0.0008165584 0.02139752 0.0004072534
## 92 92 0.03222759 0.2158808 0.02469633 0.0008157224 0.02125837 0.0004090356
## 93 93 0.03222896 0.2158286 0.02469762 0.0008178038 0.02124651 0.0004123202
## 94 94 0.03222623 0.2159786 0.02469450 0.0008236799 0.02138536 0.0004154254
## 95 95 0.03223989 0.2153981 0.02470226 0.0008161023 0.02090257 0.0004067268
## 96 96 0.03223614 0.2156159 0.02469504 0.0008127834 0.02111494 0.0003988606
## 97 97 0.03222965 0.2159216 0.02469790 0.0008124155 0.02121129 0.0004023824
## 98 98 0.03222647 0.2160474 0.02470231 0.0008095708 0.02115862 0.0003986178
## 99 99 0.03222691 0.2160717 0.02470702 0.0008181810 0.02130720 0.0003985245
## 100 100 0.03223677 0.2156877 0.02471405 0.0008196077 0.02133149 0.0004032755
## 101 101 0.03223593 0.2157674 0.02471611 0.0008153188 0.02147614 0.0004146521
## 102 102 0.03223942 0.2156195 0.02472296 0.0008150733 0.02141926 0.0004164598
## 103 103 0.03223904 0.2156575 0.02472045 0.0008114829 0.02148960 0.0004236423
## 104 104 0.03223530 0.2157903 0.02471805 0.0008037586 0.02110610 0.0004227198
## 105 105 0.03223714 0.2157067 0.02471072 0.0007989645 0.02064543 0.0004159000
## 106 106 0.03224091 0.2155593 0.02471505 0.0007953940 0.02091938 0.0004069479
## 107 107 0.03224115 0.2155315 0.02471475 0.0007883926 0.02082095 0.0004043583
## 108 108 0.03224963 0.2151795 0.02471841 0.0007859827 0.02087297 0.0004103059
## 109 109 0.03224150 0.2155517 0.02471180 0.0007803654 0.02105676 0.0004156144
## 110 110 0.03224807 0.2152743 0.02471472 0.0007841279 0.02081732 0.0004158234
## 111 111 0.03226127 0.2147428 0.02472254 0.0007876026 0.02085142 0.0004213654
## 112 112 0.03225963 0.2148179 0.02471901 0.0007838718 0.02069491 0.0004180054
## 113 113 0.03226603 0.2145443 0.02471765 0.0007919620 0.02057552 0.0004199706
## 114 114 0.03226207 0.2147422 0.02471211 0.0007903734 0.02082164 0.0004143053
## 115 115 0.03226587 0.2145890 0.02471300 0.0007906578 0.02074922 0.0004116694
## 116 116 0.03226900 0.2144841 0.02471028 0.0007939644 0.02062369 0.0004081307
## 117 117 0.03227299 0.2143127 0.02471580 0.0007951192 0.02057980 0.0004054444
## 118 118 0.03226812 0.2145047 0.02471142 0.0007888751 0.02034675 0.0004046123
## 119 119 0.03227017 0.2144148 0.02470817 0.0007899478 0.02014245 0.0003997822
## 120 120 0.03226998 0.2143998 0.02470961 0.0007845679 0.01974533 0.0003917600
## 121 121 0.03228581 0.2137869 0.02472333 0.0007908302 0.02024558 0.0003883268
## 122 122 0.03227883 0.2140971 0.02471963 0.0007876860 0.02016197 0.0003900738
## 123 123 0.03228385 0.2139087 0.02472217 0.0007771444 0.02004026 0.0003816897
## 124 124 0.03227676 0.2142409 0.02471913 0.0007864519 0.02024998 0.0003903156
## 125 125 0.03227221 0.2144342 0.02471769 0.0007823018 0.02021296 0.0003857499
## 126 126 0.03227998 0.2141133 0.02472443 0.0007780603 0.01989196 0.0003837035
## 127 127 0.03227727 0.2142413 0.02472841 0.0007792930 0.02004828 0.0003796874
## 128 128 0.03228079 0.2140966 0.02473466 0.0007830192 0.02027311 0.0003847867
## 129 129 0.03228359 0.2139677 0.02473331 0.0007833062 0.02029018 0.0003819154
## 130 130 0.03228984 0.2137296 0.02473777 0.0007896304 0.02040915 0.0003944726
## 131 131 0.03228794 0.2138000 0.02473663 0.0007799018 0.01998886 0.0003858585
## 132 132 0.03229269 0.2136181 0.02473696 0.0007800528 0.02004915 0.0003879103
## 133 133 0.03229383 0.2136224 0.02473808 0.0007844891 0.02042456 0.0003892024
## 134 134 0.03229410 0.2136440 0.02474010 0.0007856311 0.02057084 0.0003909050
## 135 135 0.03229672 0.2135407 0.02474164 0.0007889407 0.02074911 0.0003978830
## 136 136 0.03229211 0.2137606 0.02473849 0.0007912750 0.02093370 0.0003992342
## 137 137 0.03229019 0.2138309 0.02473255 0.0007912184 0.02081773 0.0004056778
## 138 138 0.03229006 0.2138463 0.02473012 0.0007868169 0.02053301 0.0003995370
## 139 139 0.03228993 0.2138655 0.02473099 0.0007929037 0.02073171 0.0003987834
## 140 140 0.03229295 0.2137530 0.02473165 0.0007897114 0.02096085 0.0003908186
## 141 141 0.03230392 0.2132728 0.02473794 0.0007821096 0.02067601 0.0003872118
## 142 142 0.03229978 0.2134559 0.02473500 0.0007876005 0.02063485 0.0003968913
## 143 143 0.03230264 0.2132873 0.02473611 0.0007881398 0.02055559 0.0003934592
## 144 144 0.03230922 0.2129977 0.02474082 0.0007845742 0.02030030 0.0003961371
## 145 145 0.03231024 0.2129393 0.02474155 0.0007778682 0.02002050 0.0003968329
## 146 146 0.03231348 0.2128216 0.02474998 0.0007731435 0.01973545 0.0003936839
## 147 147 0.03231824 0.2126225 0.02475263 0.0007713733 0.01975078 0.0003889733
## 148 148 0.03232421 0.2123780 0.02475518 0.0007715684 0.01975498 0.0003913077
## 149 149 0.03232164 0.2124678 0.02475504 0.0007786404 0.01984642 0.0003918223
## 150 150 0.03232341 0.2124152 0.02475321 0.0007784563 0.01980779 0.0003854496
## 151 151 0.03232250 0.2124679 0.02475735 0.0007755318 0.01973154 0.0003899907
## 152 152 0.03232340 0.2124386 0.02476056 0.0007758978 0.01972476 0.0003850567
## 153 153 0.03232601 0.2123290 0.02475885 0.0007816456 0.01971407 0.0003829650
## 154 154 0.03232358 0.2124208 0.02475399 0.0007790325 0.01965950 0.0003748283
## 155 155 0.03231738 0.2126993 0.02474538 0.0007838250 0.01969428 0.0003754770
## 156 156 0.03231566 0.2127792 0.02474461 0.0007815400 0.01934242 0.0003727855
## 157 157 0.03232176 0.2125167 0.02475097 0.0007781833 0.01928881 0.0003729557
## 158 158 0.03231541 0.2128088 0.02474578 0.0007800208 0.01952148 0.0003751752
## 159 159 0.03231252 0.2129431 0.02474522 0.0007855872 0.01972332 0.0003793809
## 160 160 0.03231342 0.2129182 0.02474597 0.0007832039 0.01974340 0.0003785855
## 161 161 0.03231785 0.2127288 0.02474841 0.0007890005 0.01961327 0.0003860520
## 162 162 0.03232130 0.2125981 0.02475254 0.0007902053 0.01957500 0.0003868808
## 163 163 0.03232328 0.2125303 0.02475429 0.0007923755 0.01967038 0.0003841165
## 164 164 0.03232661 0.2124154 0.02475586 0.0007898913 0.01963139 0.0003801395
## 165 165 0.03232735 0.2123675 0.02476103 0.0007890430 0.01959355 0.0003799296
## 166 166 0.03232542 0.2124570 0.02476035 0.0007857457 0.01944702 0.0003782942
## 167 167 0.03232668 0.2124257 0.02475974 0.0007831935 0.01939707 0.0003747444
## 168 168 0.03232830 0.2123689 0.02475897 0.0007830951 0.01938223 0.0003727361
## 169 169 0.03233531 0.2120738 0.02476242 0.0007800110 0.01926721 0.0003693912
## 170 170 0.03233898 0.2119301 0.02476435 0.0007795049 0.01915034 0.0003705776
## 171 171 0.03233935 0.2118858 0.02476795 0.0007806651 0.01918395 0.0003741178
## 172 172 0.03233787 0.2119802 0.02476830 0.0007791931 0.01928464 0.0003746573
## 173 173 0.03234196 0.2118103 0.02477218 0.0007815114 0.01943568 0.0003741463
## 174 174 0.03233891 0.2119309 0.02477328 0.0007771914 0.01944670 0.0003743095
## 175 175 0.03233663 0.2120195 0.02476800 0.0007781725 0.01942572 0.0003762304
## 176 176 0.03233992 0.2118997 0.02477200 0.0007798896 0.01942896 0.0003769328
## 177 177 0.03233822 0.2119690 0.02477014 0.0007783717 0.01945596 0.0003792569
## 178 178 0.03234082 0.2118552 0.02476927 0.0007766635 0.01954599 0.0003830463
## 179 179 0.03234123 0.2118270 0.02476923 0.0007769566 0.01939478 0.0003777740
## 180 180 0.03233604 0.2120356 0.02476323 0.0007790026 0.01938293 0.0003780981
## 181 181 0.03233477 0.2121116 0.02475974 0.0007788330 0.01943100 0.0003759096
## 182 182 0.03233661 0.2120389 0.02476091 0.0007788404 0.01948532 0.0003778095
## 183 183 0.03233805 0.2119756 0.02476209 0.0007798531 0.01953488 0.0003798196
## 184 184 0.03234024 0.2118879 0.02476323 0.0007819545 0.01972025 0.0003812310
## 185 185 0.03233925 0.2119249 0.02476006 0.0007846495 0.01962482 0.0003824769
## 186 186 0.03233878 0.2119479 0.02475800 0.0007862196 0.01971964 0.0003875352
## 187 187 0.03233988 0.2119070 0.02476301 0.0007853405 0.01971118 0.0003898169
## 188 188 0.03233826 0.2119864 0.02476167 0.0007822318 0.01971645 0.0003878753
## 189 189 0.03233869 0.2119851 0.02476208 0.0007790495 0.01979526 0.0003886246
## 190 190 0.03233878 0.2119936 0.02476119 0.0007778321 0.01979143 0.0003870488
## 191 191 0.03233601 0.2121162 0.02476120 0.0007756188 0.01964909 0.0003848292
## 192 192 0.03233932 0.2119813 0.02476415 0.0007774001 0.01972553 0.0003842260
## 193 193 0.03233693 0.2120958 0.02476280 0.0007791111 0.01983366 0.0003829389
## 194 194 0.03233795 0.2120619 0.02476496 0.0007798593 0.01981442 0.0003818863
## 195 195 0.03233664 0.2121282 0.02476264 0.0007776706 0.01966919 0.0003800219
## 196 196 0.03233716 0.2121042 0.02476298 0.0007773025 0.01967624 0.0003784888
## 197 197 0.03233956 0.2120110 0.02476495 0.0007770292 0.01975243 0.0003798745
## 198 198 0.03233761 0.2120966 0.02476298 0.0007766258 0.01976250 0.0003790415
## 199 199 0.03233926 0.2120381 0.02476327 0.0007782155 0.01986980 0.0003800747
## 200 200 0.03233858 0.2120665 0.02476247 0.0007793616 0.01982402 0.0003805686
## 201 201 0.03233833 0.2120648 0.02476182 0.0007774234 0.01973778 0.0003797530
## 202 202 0.03233658 0.2121409 0.02476098 0.0007757364 0.01970815 0.0003784589
## 203 203 0.03233725 0.2121165 0.02476076 0.0007750473 0.01969490 0.0003780707
## 204 204 0.03233819 0.2120761 0.02476247 0.0007763502 0.01970829 0.0003775549
## 205 205 0.03233659 0.2121419 0.02476230 0.0007768303 0.01976253 0.0003779411
## 206 206 0.03233782 0.2120827 0.02476325 0.0007765189 0.01964787 0.0003767862
## 207 207 0.03233816 0.2120633 0.02476386 0.0007750662 0.01958734 0.0003757045
## 208 208 0.03233899 0.2120310 0.02476360 0.0007752742 0.01962379 0.0003755696
## 209 209 0.03233974 0.2119998 0.02476558 0.0007737821 0.01956989 0.0003750736
## 210 210 0.03233844 0.2120568 0.02476407 0.0007735364 0.01956523 0.0003743565
## 211 211 0.03233886 0.2120399 0.02476434 0.0007736823 0.01958487 0.0003722520
## 212 212 0.03234025 0.2119892 0.02476487 0.0007730586 0.01965520 0.0003707717
## 213 213 0.03234065 0.2119714 0.02476536 0.0007737219 0.01965907 0.0003715822
## 214 214 0.03234225 0.2119060 0.02476729 0.0007739540 0.01966213 0.0003725923
## 215 215 0.03234288 0.2118850 0.02476799 0.0007738735 0.01964904 0.0003723367
## 216 216 0.03234188 0.2119303 0.02476663 0.0007732229 0.01968127 0.0003699761
## 217 217 0.03234052 0.2119937 0.02476534 0.0007722439 0.01970410 0.0003680921
## 218 218 0.03233972 0.2120305 0.02476467 0.0007725122 0.01966596 0.0003672435
## 219 219 0.03233932 0.2120445 0.02476443 0.0007729789 0.01963969 0.0003667033
## 220 220 0.03233980 0.2120221 0.02476513 0.0007732091 0.01961187 0.0003666285
## 221 221 0.03233946 0.2120365 0.02476470 0.0007737284 0.01960376 0.0003663927
## 222 222 0.03233968 0.2120298 0.02476509 0.0007753340 0.01962974 0.0003672881
## 223 223 0.03233946 0.2120410 0.02476524 0.0007745236 0.01963246 0.0003667865
## 224 224 0.03233949 0.2120405 0.02476484 0.0007741442 0.01963567 0.0003651737
## 225 225 0.03233932 0.2120462 0.02476512 0.0007739615 0.01962856 0.0003656598
## 226 226 0.03233936 0.2120489 0.02476585 0.0007749216 0.01966310 0.0003665493
## 227 227 0.03233904 0.2120664 0.02476611 0.0007752368 0.01966431 0.0003672464
## 228 228 0.03233879 0.2120782 0.02476617 0.0007748036 0.01964511 0.0003673891
## 229 229 0.03233760 0.2121265 0.02476520 0.0007753045 0.01963838 0.0003685072
## 230 230 0.03233848 0.2120872 0.02476632 0.0007747648 0.01960698 0.0003689050
## 231 231 0.03233849 0.2120847 0.02476654 0.0007744438 0.01961617 0.0003689440
## 232 232 0.03233907 0.2120604 0.02476699 0.0007746850 0.01961612 0.0003689958
## 233 233 0.03233896 0.2120633 0.02476704 0.0007747891 0.01960100 0.0003693248
## 234 234 0.03233894 0.2120640 0.02476691 0.0007748588 0.01958655 0.0003695165
## 235 235 0.03233926 0.2120501 0.02476706 0.0007746337 0.01958916 0.0003694871
## 236 236 0.03233938 0.2120450 0.02476702 0.0007749339 0.01958872 0.0003693825
## 237 237 0.03233941 0.2120437 0.02476691 0.0007748246 0.01958728 0.0003692516
## 238 238 0.03233954 0.2120388 0.02476691 0.0007749659 0.01958784 0.0003693548
## 239 239 0.03233954 0.2120394 0.02476685 0.0007750095 0.01958734 0.0003694222
## 240 240 0.03233956 0.2120384 0.02476680 0.0007749933 0.01958899 0.0003693663
## [1] "Best Model"
## nvmax
## 15 15
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## [1] "Coefficients of final model:"
## Estimate 2.5 % 97.5 %
## (Intercept) 1.963910e+00 1.947791e+00 1.980030e+00
## x4 -5.353377e-05 -7.102358e-05 -3.604396e-05
## x7 1.149425e-02 1.026902e-02 1.271949e-02
## x8 4.293628e-04 1.427311e-04 7.159945e-04
## x9 3.324791e-03 2.685469e-03 3.964112e-03
## x10 1.337977e-03 7.438113e-04 1.932143e-03
## x11 2.291277e+05 8.705589e+04 3.711996e+05
## x16 9.873956e-04 5.758899e-04 1.398901e-03
## x17 1.719254e-03 1.089382e-03 2.349127e-03
## x21 1.404906e-04 5.914971e-05 2.218316e-04
## stat14 -9.651107e-04 -1.438841e-03 -4.913807e-04
## stat98 3.672725e-03 3.201860e-03 4.143589e-03
## stat100 7.167091e-04 2.372289e-04 1.196189e-03
## stat110 -3.313066e-03 -3.787795e-03 -2.838337e-03
## stat146 -7.552975e-04 -1.235627e-03 -2.749676e-04
## x18.sqrt 2.640688e-02 2.459080e-02 2.822297e-02
# Evaluate the backward-elimination (leapBackward) model on the held-out test
# set, printing a prediction summary and the test MSE (echoed output follows).
# isTRUE() instead of `== TRUE`: it returns FALSE (rather than raising
# "argument is of length zero") when the flag is NULL, e.g. if the parameter
# was omitted from `params`.
# NOTE(review): `transformation = t` presumably passes the inverse transform
# (undoing the log10 of the label) defined earlier in the file — confirm `t`
# is that function and not base R's transpose t().
if (isTRUE(algo.backward.caret)) {
  test.model(model.backward, data.test,
             method = "leapBackward", subopt = NULL,
             formula = formula, feature.names = feature.names,
             label.names = label.names,
             id = id,
             draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 2.032 2.085 2.097 2.097 2.110 2.149
## [1] "leapBackward Test MSE: 0.000992395717031665"
# Fit a stepwise (leapSeq) subset-selection model with caret on the training
# data. The seed makes the cross-validation folds reproducible (and matches
# the seed used for the forward/backward fits, so folds are comparable);
# `returned` carries both the fitted model and the run id used later by
# test.model().
# isTRUE() instead of `== TRUE`: it returns FALSE (rather than raising
# "argument is of length zero") when the flag is NULL, e.g. if the parameter
# was omitted from `params`.
if (isTRUE(algo.stepwise.caret)) {
  set.seed(1)  # reproducible CV fold assignment
  returned <- train.caret.glmselect(formula = formula,
                                    data = data.train,
                                    method = "leapSeq",
                                    feature.names = feature.names)
  model.stepwise <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 18 on full training set
## [1] "All models results"
## nvmax RMSE Rsquared MAE RMSESD RsquaredSD MAESD
## 1 1 0.03426021 0.1102603 0.02678506 0.0007528041 0.02129231 0.0004213658
## 2 2 0.03341255 0.1548136 0.02608465 0.0009549896 0.03477939 0.0005441361
## 3 3 0.03278951 0.1857579 0.02547103 0.0009056890 0.03288428 0.0004986653
## 4 4 0.03227713 0.2113517 0.02470874 0.0008645806 0.03734399 0.0005939377
## 5 5 0.03198524 0.2249611 0.02451264 0.0009072163 0.03436803 0.0005425191
## 6 6 0.03196756 0.2257863 0.02450055 0.0009147022 0.03323145 0.0005522053
## 7 7 0.03186275 0.2306455 0.02443907 0.0008841335 0.03173780 0.0005201270
## 8 8 0.03180485 0.2333632 0.02440347 0.0008909229 0.03190063 0.0004926743
## 9 9 0.03174677 0.2360753 0.02438467 0.0009075872 0.03063354 0.0004965026
## 10 10 0.03168841 0.2387674 0.02432430 0.0008999805 0.02990088 0.0004865291
## 11 11 0.03169907 0.2381655 0.02434296 0.0008523372 0.02756473 0.0004382474
## 12 12 0.03171113 0.2375547 0.02437469 0.0008600439 0.02677835 0.0004351152
## 13 13 0.03200490 0.2226034 0.02465558 0.0015671846 0.06681294 0.0012041844
## 14 14 0.03166420 0.2397724 0.02430440 0.0008508276 0.02621653 0.0004402137
## 15 15 0.03163949 0.2409469 0.02429082 0.0008593315 0.02618453 0.0004442920
## 16 16 0.03165164 0.2403430 0.02429781 0.0008321322 0.02526408 0.0004478658
## 17 17 0.03165851 0.2400597 0.02429486 0.0008510357 0.02571485 0.0004432393
## 18 18 0.03163761 0.2410311 0.02428919 0.0008637303 0.02482473 0.0004450328
## 19 19 0.03197312 0.2235867 0.02457625 0.0010062941 0.05516598 0.0008752393
## 20 20 0.03197949 0.2233679 0.02455323 0.0007454009 0.04829339 0.0004830157
## 21 21 0.03173476 0.2365096 0.02436339 0.0008313945 0.02379484 0.0004333460
## 22 22 0.03207813 0.2191086 0.02462715 0.0012410605 0.05560380 0.0008145365
## 23 23 0.03199136 0.2232666 0.02457199 0.0010264215 0.05081490 0.0006865414
## 24 24 0.03179461 0.2338135 0.02443553 0.0008431137 0.02463055 0.0004305614
## 25 25 0.03211150 0.2179468 0.02475889 0.0015164303 0.06348317 0.0011722005
## 26 26 0.03221701 0.2130780 0.02479872 0.0014641065 0.05078950 0.0010579248
## 27 27 0.03254169 0.1970330 0.02509424 0.0020046793 0.08399034 0.0015117859
## 28 28 0.03204653 0.2208775 0.02458339 0.0010147819 0.05023934 0.0006740563
## 29 29 0.03213600 0.2172938 0.02466175 0.0012303719 0.04623564 0.0009051064
## 30 30 0.03185880 0.2309478 0.02445404 0.0008050190 0.02355782 0.0004140711
## 31 31 0.03255911 0.1964859 0.02501215 0.0019301170 0.07381269 0.0014170684
## 32 32 0.03212428 0.2169001 0.02466072 0.0006895764 0.04567243 0.0004610042
## 33 33 0.03220379 0.2138838 0.02480333 0.0014706052 0.06240923 0.0011672366
## 34 34 0.03219455 0.2146988 0.02469640 0.0011895860 0.04543765 0.0008999051
## 35 35 0.03220277 0.2137855 0.02470765 0.0011586550 0.05356909 0.0007337005
## 36 36 0.03189655 0.2294505 0.02447863 0.0007776587 0.02417592 0.0003962460
## 37 37 0.03247505 0.2000156 0.02493592 0.0012233887 0.06216365 0.0011193100
## 38 38 0.03212955 0.2174295 0.02464398 0.0009451173 0.04812882 0.0006310883
## 39 39 0.03193991 0.2275755 0.02451044 0.0007860591 0.02475719 0.0004125949
## 40 40 0.03279218 0.1845538 0.02520984 0.0015595446 0.07505150 0.0012251982
## 41 41 0.03297090 0.1763319 0.02534927 0.0018918078 0.07233713 0.0013448120
## 42 42 0.03226772 0.2117584 0.02475508 0.0011518058 0.04379280 0.0008795168
## 43 43 0.03237681 0.2064568 0.02485839 0.0014978001 0.06003528 0.0010975392
## 44 44 0.03198733 0.2255056 0.02455071 0.0007911748 0.02407855 0.0003831792
## 45 45 0.03269079 0.1908050 0.02512984 0.0018473723 0.06592762 0.0013367278
## 46 46 0.03258664 0.1944599 0.02512349 0.0014798619 0.07429498 0.0012964680
## 47 47 0.03280184 0.1857338 0.02522154 0.0018265261 0.06766167 0.0013456653
## 48 48 0.03240412 0.2053495 0.02487957 0.0015082994 0.05907608 0.0010770392
## 49 49 0.03306443 0.1722415 0.02548384 0.0018870053 0.07797897 0.0013405949
## 50 50 0.03231960 0.2090136 0.02490663 0.0014235201 0.05912435 0.0011156091
## 51 51 0.03223824 0.2127324 0.02471612 0.0009529229 0.04624799 0.0006011677
## 52 52 0.03269420 0.1907087 0.02520216 0.0018366052 0.07573378 0.0014375256
## 53 53 0.03243561 0.2040980 0.02490065 0.0015107592 0.05835313 0.0010870623
## 54 54 0.03206740 0.2221724 0.02459821 0.0008375350 0.02384293 0.0004094759
## 55 55 0.03258365 0.1955333 0.02497689 0.0011663929 0.05983910 0.0007728523
## 56 56 0.03274163 0.1886948 0.02523539 0.0018417423 0.07509640 0.0014429258
## 57 57 0.03258818 0.1947715 0.02498901 0.0013691681 0.06271333 0.0009846771
## 58 58 0.03248161 0.2022654 0.02493994 0.0015162427 0.05819785 0.0011142514
## 59 59 0.03262598 0.1921229 0.02502533 0.0008687430 0.06513372 0.0008551064
## 60 60 0.03234406 0.2070991 0.02479318 0.0007612227 0.04675897 0.0004432228
## 61 61 0.03239393 0.2063494 0.02485972 0.0011924040 0.04401910 0.0008771546
## 62 62 0.03264875 0.1920569 0.02501805 0.0010672001 0.05569341 0.0008411652
## 63 63 0.03298012 0.1763828 0.02543633 0.0017095108 0.07193430 0.0013237338
## 64 64 0.03244520 0.2037523 0.02488181 0.0010985446 0.04838627 0.0006467292
## 65 65 0.03238378 0.2054057 0.02481080 0.0007346983 0.04570533 0.0004117181
## 66 66 0.03335947 0.1571987 0.02584053 0.0019664703 0.08721492 0.0015311205
## 67 67 0.03267983 0.1905481 0.02503200 0.0011062002 0.05768050 0.0008720779
## 68 68 0.03251663 0.2007399 0.02500186 0.0014686752 0.05134131 0.0010346187
## 69 69 0.03292308 0.1789223 0.02530331 0.0015784799 0.07703464 0.0011256445
## 70 70 0.03256637 0.1954343 0.02491202 0.0008032307 0.05747340 0.0005690655
## 71 71 0.03254543 0.1995308 0.02500215 0.0014654666 0.05105647 0.0010436991
## 72 72 0.03266215 0.1912635 0.02509910 0.0012998122 0.06641083 0.0010147541
## 73 73 0.03263868 0.1936076 0.02507851 0.0015135988 0.07019458 0.0011064970
## 74 74 0.03269599 0.1896959 0.02501854 0.0009502313 0.05783864 0.0005993396
## 75 75 0.03216933 0.2181565 0.02466650 0.0008371304 0.02288286 0.0004264993
## 76 76 0.03247948 0.2026533 0.02489938 0.0012389126 0.04682898 0.0009391196
## 77 77 0.03253924 0.2000993 0.02496354 0.0014734364 0.05657768 0.0011169470
## 78 78 0.03236338 0.2076091 0.02478868 0.0009211083 0.04492778 0.0006141751
## 79 79 0.03285701 0.1843417 0.02527825 0.0018443851 0.07626370 0.0013896952
## 80 80 0.03279920 0.1868887 0.02520890 0.0016849908 0.07048863 0.0012799010
## 81 81 0.03371680 0.1397710 0.02591576 0.0016123702 0.07480412 0.0013564416
## 82 82 0.03311640 0.1694614 0.02539313 0.0014088150 0.06614297 0.0010217000
## 83 83 0.03247785 0.2022275 0.02490950 0.0009362179 0.05125990 0.0008480185
## 84 84 0.03304077 0.1742065 0.02541946 0.0017529953 0.07708364 0.0013807392
## 85 85 0.03220692 0.2167420 0.02469054 0.0008017377 0.02183547 0.0004135169
## 86 86 0.03221508 0.2163657 0.02470543 0.0008019339 0.02178429 0.0004046968
## 87 87 0.03222149 0.2161042 0.02470887 0.0008191402 0.02211702 0.0004163802
## 88 88 0.03279820 0.1865528 0.02521165 0.0015372411 0.07472505 0.0012214912
## 89 89 0.03323389 0.1632888 0.02559192 0.0017463308 0.08303527 0.0014516929
## 90 90 0.03251770 0.2012134 0.02492155 0.0012163649 0.04593042 0.0009202539
## 91 91 0.03340325 0.1561680 0.02574804 0.0017804000 0.07756671 0.0013182787
## 92 92 0.03221946 0.2162595 0.02469021 0.0008243319 0.02158716 0.0004168150
## 93 93 0.03302336 0.1733501 0.02533779 0.0013942600 0.07947318 0.0011782834
## 94 94 0.03222983 0.2157896 0.02470660 0.0008287097 0.02132207 0.0004073701
## 95 95 0.03289306 0.1827537 0.02522324 0.0015523832 0.06498826 0.0011159597
## 96 96 0.03293556 0.1813974 0.02535792 0.0019118611 0.07500112 0.0013889920
## 97 97 0.03246402 0.2022768 0.02485139 0.0007639436 0.04634773 0.0004568322
## 98 98 0.03252429 0.2008413 0.02493926 0.0010750435 0.04713801 0.0006273854
## 99 99 0.03280327 0.1873779 0.02523611 0.0019067541 0.07561378 0.0013751561
## 100 100 0.03246248 0.2023956 0.02485764 0.0007633733 0.04622087 0.0004480062
## 101 101 0.03254378 0.2005523 0.02500363 0.0015061454 0.06186956 0.0010420025
## 102 102 0.03264950 0.1958557 0.02509464 0.0015246153 0.05425124 0.0011209119
## 103 103 0.03254503 0.2005299 0.02500657 0.0015068723 0.06203048 0.0010477680
## 104 104 0.03288968 0.1835950 0.02527077 0.0018837940 0.07096742 0.0014329345
## 105 105 0.03243559 0.2046713 0.02485292 0.0009404413 0.04633102 0.0006474864
## 106 106 0.03255054 0.2003227 0.02499796 0.0015106145 0.06218653 0.0010588459
## 107 107 0.03244588 0.2042427 0.02486009 0.0009351503 0.04648182 0.0006410827
## 108 108 0.03224900 0.2152140 0.02471380 0.0007920549 0.02094642 0.0004137188
## 109 109 0.03244695 0.2041920 0.02486047 0.0009437589 0.04655217 0.0006493135
## 110 110 0.03357546 0.1486055 0.02581837 0.0020009994 0.08502649 0.0015360109
## 111 111 0.03286891 0.1839594 0.02529617 0.0014788597 0.06623330 0.0010647275
## 112 112 0.03323185 0.1662339 0.02557556 0.0018870485 0.07770921 0.0015458780
## 113 113 0.03286578 0.1850184 0.02526490 0.0019239645 0.07618745 0.0014043353
## 114 114 0.03280926 0.1851060 0.02512701 0.0009270755 0.05829049 0.0005961029
## 115 115 0.03226681 0.2145424 0.02471225 0.0007916689 0.02064924 0.0004119487
## 116 116 0.03327290 0.1636512 0.02553013 0.0017515797 0.07469717 0.0014018809
## 117 117 0.03277678 0.1861284 0.02510491 0.0008395112 0.06381350 0.0008687698
## 118 118 0.03256160 0.1999976 0.02498529 0.0014885259 0.05322118 0.0010880768
## 119 119 0.03264519 0.1963022 0.02500761 0.0014496528 0.05514634 0.0010798893
## 120 120 0.03226804 0.2144763 0.02471012 0.0007868033 0.01972932 0.0003907377
## 121 121 0.03269678 0.1917941 0.02512118 0.0012989212 0.05344484 0.0010110089
## 122 122 0.03279612 0.1881949 0.02520303 0.0013130199 0.05277743 0.0011031751
## 123 123 0.03243115 0.2047506 0.02486390 0.0006669425 0.03520638 0.0004330723
## 124 124 0.03281738 0.1874196 0.02512332 0.0011777548 0.04916489 0.0008353239
## 125 125 0.03245523 0.2043000 0.02489021 0.0007888071 0.03873722 0.0006894282
## 126 126 0.03257862 0.1992888 0.02496322 0.0010232176 0.04352704 0.0006069843
## 127 127 0.03227936 0.2141487 0.02473016 0.0007772435 0.01994059 0.0003762790
## 128 128 0.03227953 0.2141572 0.02473061 0.0007714045 0.01999396 0.0003743417
## 129 129 0.03227830 0.2141924 0.02472791 0.0007744807 0.02016826 0.0003740321
## 130 130 0.03228860 0.2137809 0.02473662 0.0007900658 0.02034152 0.0003941740
## 131 131 0.03301961 0.1766699 0.02536761 0.0014856578 0.06514986 0.0010992444
## 132 132 0.03229274 0.2136238 0.02473704 0.0007801196 0.02003909 0.0003879666
## 133 133 0.03258921 0.1967811 0.02499315 0.0007443202 0.04591140 0.0005677860
## 134 134 0.03229410 0.2136440 0.02474010 0.0007856311 0.02057084 0.0003909050
## 135 135 0.03229773 0.2134859 0.02474310 0.0007879773 0.02068380 0.0003956302
## 136 136 0.03261240 0.1963390 0.02504414 0.0011151718 0.04467824 0.0008325959
## 137 137 0.03252213 0.2025624 0.02487032 0.0010373179 0.03450699 0.0006747428
## 138 138 0.03228886 0.2139092 0.02473060 0.0007877162 0.02061861 0.0004021387
## 139 139 0.03256088 0.1989855 0.02505341 0.0010217170 0.04247499 0.0007999031
## 140 140 0.03277623 0.1900521 0.02509640 0.0011278782 0.04306152 0.0006842554
## 141 141 0.03230265 0.2133254 0.02473883 0.0007809932 0.02063646 0.0003867723
## 142 142 0.03246920 0.2044665 0.02488700 0.0008936184 0.04129788 0.0006341345
## 143 143 0.03230216 0.2133205 0.02473715 0.0007890415 0.02064677 0.0003912278
## 144 144 0.03230688 0.2131008 0.02473884 0.0007830151 0.02022017 0.0003966267
## 145 145 0.03293139 0.1820256 0.02518850 0.0012260040 0.04972632 0.0009335254
## 146 146 0.03250568 0.2032790 0.02494296 0.0012258934 0.03953457 0.0008911099
## 147 147 0.03248756 0.2038270 0.02496698 0.0010561372 0.03814764 0.0007394057
## 148 148 0.03275726 0.1883747 0.02517710 0.0010593375 0.04738822 0.0009032608
## 149 149 0.03291900 0.1796423 0.02530868 0.0009728474 0.05771253 0.0008171156
## 150 150 0.03275141 0.1914618 0.02508962 0.0013699790 0.04624983 0.0010152230
## 151 151 0.03297507 0.1799907 0.02533140 0.0012567942 0.04877346 0.0008232400
## 152 152 0.03232623 0.2123224 0.02475943 0.0007741861 0.01973812 0.0003803414
## 153 153 0.03251049 0.2031080 0.02492873 0.0011638983 0.04335194 0.0007334779
## 154 154 0.03251734 0.2028365 0.02495328 0.0012233988 0.03926107 0.0008678396
## 155 155 0.03264658 0.1949548 0.02504479 0.0010596530 0.04858157 0.0007046614
## 156 156 0.03231853 0.2126711 0.02474864 0.0007823963 0.01934516 0.0003832608
## 157 157 0.03287990 0.1833578 0.02519433 0.0009891159 0.05015231 0.0007466431
## 158 158 0.03260146 0.1995998 0.02498921 0.0012346498 0.03792159 0.0008076680
## 159 159 0.03231542 0.2128369 0.02474674 0.0007840497 0.01979479 0.0003779156
## 160 160 0.03260385 0.1995005 0.02499242 0.0012443942 0.03841945 0.0008081441
## 161 161 0.03251534 0.2029624 0.02494688 0.0012503425 0.04035724 0.0008877029
## 162 162 0.03290476 0.1826845 0.02521566 0.0012945797 0.05028860 0.0009852825
## 163 163 0.03232087 0.2126422 0.02475282 0.0007884861 0.01953833 0.0003817910
## 164 164 0.03232300 0.2125721 0.02475382 0.0007856721 0.01954520 0.0003796819
## 165 165 0.03306946 0.1766407 0.02538798 0.0014866264 0.05629872 0.0009483844
## 166 166 0.03245195 0.2051684 0.02488844 0.0007406907 0.03090354 0.0005676735
## 167 167 0.03259538 0.1992343 0.02497067 0.0009522438 0.03840549 0.0005271611
## 168 168 0.03272168 0.1925954 0.02512958 0.0013159155 0.05336947 0.0010041650
## 169 169 0.03264518 0.1949326 0.02509603 0.0009474110 0.04365013 0.0006916047
## 170 170 0.03257014 0.2007859 0.02492621 0.0010822894 0.03381060 0.0006607604
## 171 171 0.03250088 0.2034485 0.02497122 0.0010526241 0.03702172 0.0007141032
## 172 172 0.03263198 0.1984194 0.02502244 0.0012544149 0.03882618 0.0008353300
## 173 173 0.03234129 0.2118288 0.02477128 0.0007791398 0.01928415 0.0003735436
## 174 174 0.03245146 0.2052766 0.02488887 0.0007317649 0.02992359 0.0005443266
## 175 175 0.03257956 0.2003694 0.02493323 0.0011090793 0.03536732 0.0006772445
## 176 176 0.03268309 0.1941390 0.02502724 0.0009846689 0.03939067 0.0007683524
## 177 177 0.03233963 0.2119021 0.02477123 0.0007786152 0.01936827 0.0003787499
## 178 178 0.03253876 0.2022266 0.02495133 0.0011916805 0.04418171 0.0007547929
## 179 179 0.03283334 0.1885540 0.02523140 0.0015372651 0.04909268 0.0011216441
## 180 180 0.03253778 0.2022475 0.02494960 0.0011941926 0.04418992 0.0007572986
## 181 181 0.03245076 0.2053006 0.02487644 0.0007360293 0.03016821 0.0005495584
## 182 182 0.03281298 0.1892332 0.02507113 0.0012754133 0.04286911 0.0008834036
## 183 183 0.03245045 0.2053276 0.02487352 0.0007394563 0.03018665 0.0005488941
## 184 184 0.03250659 0.2032244 0.02497423 0.0010575832 0.03754238 0.0007248236
## 185 185 0.03233975 0.2119005 0.02476122 0.0007836676 0.01963304 0.0003828298
## 186 186 0.03253421 0.2023374 0.02496341 0.0012501209 0.04037074 0.0009144853
## 187 187 0.03264806 0.1957261 0.02506037 0.0011460465 0.04820389 0.0008273187
## 188 188 0.03233826 0.2119864 0.02476167 0.0007822318 0.01971645 0.0003878753
## 189 189 0.03233869 0.2119851 0.02476208 0.0007790495 0.01979526 0.0003886246
## 190 190 0.03233894 0.2119923 0.02476104 0.0007768696 0.01983093 0.0003872731
## 191 191 0.03233939 0.2119837 0.02476349 0.0007749667 0.01980069 0.0003842887
## 192 192 0.03288869 0.1863463 0.02520388 0.0014420843 0.04578330 0.0009921659
## 193 193 0.03233693 0.2120958 0.02476280 0.0007791111 0.01983366 0.0003829389
## 194 194 0.03251343 0.2030529 0.02497746 0.0010736300 0.03840866 0.0007268128
## 195 195 0.03233664 0.2121282 0.02476264 0.0007776706 0.01966919 0.0003800219
## 196 196 0.03233712 0.2121063 0.02476277 0.0007774062 0.01969201 0.0003797941
## 197 197 0.03266754 0.1941865 0.02511150 0.0009650432 0.04505095 0.0007172576
## 198 198 0.03233820 0.2120682 0.02476267 0.0007769896 0.01975927 0.0003802039
## 199 199 0.03271942 0.1926770 0.02514848 0.0011619461 0.05327973 0.0008631609
## 200 200 0.03262722 0.1982521 0.02499375 0.0009884962 0.04034625 0.0005774699
## 201 201 0.03263095 0.1980946 0.02499582 0.0009928108 0.04055962 0.0005827746
## 202 202 0.03262977 0.1981477 0.02499573 0.0009924009 0.04060020 0.0005824297
## 203 203 0.03264462 0.1980091 0.02503111 0.0012736255 0.03991654 0.0008696827
## 204 204 0.03233804 0.2120777 0.02476241 0.0007765086 0.01970782 0.0003775372
## 205 205 0.03233659 0.2121419 0.02476230 0.0007768303 0.01976253 0.0003779411
## 206 206 0.03266084 0.1948383 0.02505225 0.0008833044 0.04868921 0.0007498102
## 207 207 0.03233816 0.2120633 0.02476386 0.0007750662 0.01958734 0.0003757045
## 208 208 0.03265196 0.1956450 0.02505825 0.0011435912 0.04843800 0.0008170874
## 209 209 0.03233921 0.2120226 0.02476448 0.0007743627 0.01956316 0.0003747725
## 210 210 0.03249264 0.2032119 0.02490248 0.0006680661 0.03356549 0.0004405823
## 211 211 0.03245821 0.2050668 0.02488914 0.0007349337 0.03083145 0.0005659010
## 212 212 0.03234070 0.2119695 0.02476533 0.0007736860 0.01968851 0.0003711108
## 213 213 0.03255296 0.2014111 0.02493741 0.0009534177 0.04491345 0.0006650987
## 214 214 0.03281808 0.1889785 0.02512721 0.0012427853 0.05295144 0.0008874842
## 215 215 0.03252377 0.2026923 0.02498991 0.0010798652 0.03866899 0.0007437722
## 216 216 0.03234116 0.2119540 0.02476628 0.0007736106 0.01965083 0.0003699500
## 217 217 0.03249035 0.2033511 0.02489902 0.0006656094 0.03313424 0.0004271639
## 218 218 0.03255661 0.2013434 0.02494207 0.0009611554 0.04529124 0.0006756475
## 219 219 0.03245915 0.2050601 0.02488948 0.0007348316 0.03090059 0.0005648241
## 220 220 0.03233980 0.2120221 0.02476513 0.0007732091 0.01961187 0.0003666285
## 221 221 0.03290651 0.1830118 0.02529317 0.0012180960 0.05803164 0.0010031310
## 222 222 0.03233955 0.2120339 0.02476491 0.0007752211 0.01962662 0.0003671327
## 223 223 0.03233933 0.2120434 0.02476507 0.0007744084 0.01962756 0.0003666365
## 224 224 0.03233949 0.2120405 0.02476484 0.0007741442 0.01963567 0.0003651737
## 225 225 0.03325016 0.1683168 0.02553365 0.0016489595 0.06047643 0.0011755957
## 226 226 0.03233936 0.2120489 0.02476585 0.0007749216 0.01966310 0.0003665493
## 227 227 0.03249056 0.2034074 0.02490424 0.0006690850 0.03313585 0.0004335597
## 228 228 0.03233879 0.2120782 0.02476617 0.0007748036 0.01964511 0.0003673891
## 229 229 0.03233760 0.2121265 0.02476520 0.0007753045 0.01963838 0.0003685072
## 230 230 0.03255749 0.2016811 0.02496442 0.0012429660 0.04646462 0.0007982236
## 231 231 0.03233849 0.2120847 0.02476654 0.0007744438 0.01961617 0.0003689440
## 232 232 0.03233907 0.2120604 0.02476699 0.0007746850 0.01961612 0.0003689958
## 233 233 0.03281237 0.1891698 0.02518206 0.0012848004 0.04757993 0.0009695255
## 234 234 0.03233894 0.2120640 0.02476691 0.0007748588 0.01958655 0.0003695165
## 235 235 0.03254307 0.2021794 0.02497901 0.0012597917 0.04069534 0.0009118314
## 236 236 0.03345434 0.1576488 0.02577368 0.0015268179 0.05888331 0.0011163873
## 237 237 0.03350972 0.1558862 0.02578852 0.0017672432 0.06198039 0.0012537860
## 238 238 0.03307605 0.1773725 0.02546007 0.0017135446 0.06199986 0.0011647812
## 239 239 0.03348090 0.1557793 0.02566540 0.0010811676 0.05124994 0.0009267201
## 240 240 0.03233956 0.2120384 0.02476680 0.0007749933 0.01958899 0.0003693663
## [1] "Best Model"
## nvmax
## 18 18
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## [1] "Coefficients of final model:"
## Estimate 2.5 % 97.5 %
## (Intercept) 1.963771e+00 1.947652e+00 1.979890e+00
## x4 -5.320052e-05 -7.067149e-05 -3.572956e-05
## x7 1.145572e-02 1.023138e-02 1.268006e-02
## x8 4.151672e-04 1.287736e-04 7.015607e-04
## x9 3.304881e-03 2.666142e-03 3.943619e-03
## x10 1.330109e-03 7.363538e-04 1.923864e-03
## x11 2.322545e+05 9.026901e+04 3.742399e+05
## x16 9.724642e-04 5.613936e-04 1.383535e-03
## x17 1.732465e-03 1.103220e-03 2.361709e-03
## x21 1.441897e-04 6.289987e-05 2.254795e-04
## stat4 -5.620629e-04 -1.042930e-03 -8.119545e-05
## stat13 -6.103082e-04 -1.085991e-03 -1.346250e-04
## stat14 -9.719696e-04 -1.445196e-03 -4.987432e-04
## stat92 -5.379545e-04 -1.017316e-03 -5.859352e-05
## stat98 3.661289e-03 3.190942e-03 4.131635e-03
## stat100 7.073783e-04 2.284089e-04 1.186348e-03
## stat110 -3.314730e-03 -3.788999e-03 -2.840461e-03
## stat146 -7.717429e-04 -1.251591e-03 -2.918949e-04
## x18.sqrt 2.644477e-02 2.463045e-02 2.825908e-02
# Evaluate the stepwise-selected (leapSeq) model on the held-out test set,
# printing the prediction summary and test MSE. `transformation = t` undoes
# the output transform applied at training time — presumably the log/identity
# transform set up earlier; confirm `t` is the intended object and not base
# `t()` (transpose).
if (algo.stepwise.caret == TRUE){
  test.model(
    model.stepwise, data.test,
    method = 'leapSeq', subopt = NULL,
    formula = formula,
    feature.names = feature.names,
    label.names = label.names,
    id = id,
    draw.limits = TRUE,
    transformation = t
  )
}
## [1] "Summary of predicted values: "
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 2.033 2.085 2.097 2.097 2.110 2.150
## [1] "leapSeq Test MSE: 0.000993509884551637"
# Train a LASSO model (glmnet with alpha fixed at 1) through caret,
# tuning lambda over 100 log-spaced values in [1e-4, 1].
# isTRUE() instead of `== TRUE`: if the params entry were missing,
# `NULL == TRUE` yields logical(0) and `if` would error; isTRUE(NULL)
# is simply FALSE and the block is skipped.
if (isTRUE(algo.LASSO.caret)){
  set.seed(1)  # reproducible CV fold assignment
  # `length.out` spelled out in full — the original `length=` only worked
  # via partial argument matching.
  tune.grid= expand.grid(alpha = 1,
                         lambda = 10^seq(from = -4, to = 0, length.out = 100))
  returned = train.caret.glmselect(formula = formula
                       ,data = data.train
                       ,method = "glmnet"
                       ,subopt = 'LASSO'
                       ,tune.grid = tune.grid
                       ,feature.names = feature.names)
  model.LASSO.caret = returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info = trainInfo, : There were missing values in resampled
## performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.000643 on full training set
## glmnet
##
## 5584 samples
## 240 predictor
##
## No pre-processing
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 5026, 5026, 5026, 5025, 5025, 5026, ...
## Resampling results across tuning parameters:
##
## lambda RMSE Rsquared MAE
## 0.0001000000 0.03210399 0.2203605 0.02460799
## 0.0001097499 0.03208546 0.2210637 0.02459714
## 0.0001204504 0.03206600 0.2218114 0.02458601
## 0.0001321941 0.03204598 0.2225907 0.02457468
## 0.0001450829 0.03202534 0.2234058 0.02456317
## 0.0001592283 0.03200393 0.2242633 0.02455100
## 0.0001747528 0.03198148 0.2251790 0.02453829
## 0.0001917910 0.03195820 0.2261474 0.02452560
## 0.0002104904 0.03193449 0.2271545 0.02451431
## 0.0002310130 0.03191097 0.2281734 0.02450427
## 0.0002535364 0.03188749 0.2292155 0.02449398
## 0.0002782559 0.03186345 0.2303146 0.02448301
## 0.0003053856 0.03183960 0.2314472 0.02447198
## 0.0003351603 0.03181677 0.2325776 0.02446162
## 0.0003678380 0.03179488 0.2337070 0.02445123
## 0.0004037017 0.03177435 0.2348175 0.02444192
## 0.0004430621 0.03175595 0.2358781 0.02443428
## 0.0004862602 0.03174176 0.2367838 0.02443072
## 0.0005336699 0.03173181 0.2375271 0.02443121
## 0.0005857021 0.03172455 0.2381932 0.02443410
## 0.0006428073 0.03172352 0.2386037 0.02444169
## 0.0007054802 0.03172853 0.2387832 0.02445402
## 0.0007742637 0.03173894 0.2387736 0.02446982
## 0.0008497534 0.03175531 0.2385591 0.02449036
## 0.0009326033 0.03177989 0.2380225 0.02452007
## 0.0010235310 0.03181523 0.2370290 0.02455797
## 0.0011233240 0.03186001 0.2356493 0.02460371
## 0.0012328467 0.03191074 0.2340768 0.02465537
## 0.0013530478 0.03196471 0.2324801 0.02471097
## 0.0014849683 0.03202280 0.2308272 0.02476928
## 0.0016297508 0.03208949 0.2288767 0.02483467
## 0.0017886495 0.03216561 0.2265995 0.02490776
## 0.0019630407 0.03224586 0.2243528 0.02498667
## 0.0021544347 0.03232777 0.2223542 0.02507063
## 0.0023644894 0.03241517 0.2204414 0.02516029
## 0.0025950242 0.03250966 0.2186031 0.02525603
## 0.0028480359 0.03261733 0.2165526 0.02536051
## 0.0031257158 0.03274517 0.2138644 0.02547940
## 0.0034304693 0.03289849 0.2101266 0.02561715
## 0.0037649358 0.03308223 0.2048401 0.02577523
## 0.0041320124 0.03329608 0.1977560 0.02595337
## 0.0045348785 0.03352503 0.1899198 0.02614103
## 0.0049770236 0.03377661 0.1804757 0.02634229
## 0.0054622772 0.03407109 0.1666127 0.02657274
## 0.0059948425 0.03438039 0.1501826 0.02680649
## 0.0065793322 0.03466592 0.1347470 0.02701157
## 0.0072208090 0.03493247 0.1202722 0.02719315
## 0.0079248290 0.03515412 0.1114842 0.02734203
## 0.0086974900 0.03534371 0.1102603 0.02746367
## 0.0095454846 0.03556152 0.1102603 0.02760707
## 0.0104761575 0.03582216 0.1102603 0.02778115
## 0.0114975700 0.03613365 0.1102603 0.02799140
## 0.0126185688 0.03629208 NaN 0.02809992
## 0.0138488637 0.03629208 NaN 0.02809992
## 0.0151991108 0.03629208 NaN 0.02809992
## 0.0166810054 0.03629208 NaN 0.02809992
## 0.0183073828 0.03629208 NaN 0.02809992
## 0.0200923300 0.03629208 NaN 0.02809992
## 0.0220513074 0.03629208 NaN 0.02809992
## 0.0242012826 0.03629208 NaN 0.02809992
## 0.0265608778 0.03629208 NaN 0.02809992
## 0.0291505306 0.03629208 NaN 0.02809992
## 0.0319926714 0.03629208 NaN 0.02809992
## 0.0351119173 0.03629208 NaN 0.02809992
## 0.0385352859 0.03629208 NaN 0.02809992
## 0.0422924287 0.03629208 NaN 0.02809992
## 0.0464158883 0.03629208 NaN 0.02809992
## 0.0509413801 0.03629208 NaN 0.02809992
## 0.0559081018 0.03629208 NaN 0.02809992
## 0.0613590727 0.03629208 NaN 0.02809992
## 0.0673415066 0.03629208 NaN 0.02809992
## 0.0739072203 0.03629208 NaN 0.02809992
## 0.0811130831 0.03629208 NaN 0.02809992
## 0.0890215085 0.03629208 NaN 0.02809992
## 0.0977009957 0.03629208 NaN 0.02809992
## 0.1072267222 0.03629208 NaN 0.02809992
## 0.1176811952 0.03629208 NaN 0.02809992
## 0.1291549665 0.03629208 NaN 0.02809992
## 0.1417474163 0.03629208 NaN 0.02809992
## 0.1555676144 0.03629208 NaN 0.02809992
## 0.1707352647 0.03629208 NaN 0.02809992
## 0.1873817423 0.03629208 NaN 0.02809992
## 0.2056512308 0.03629208 NaN 0.02809992
## 0.2257019720 0.03629208 NaN 0.02809992
## 0.2477076356 0.03629208 NaN 0.02809992
## 0.2718588243 0.03629208 NaN 0.02809992
## 0.2983647240 0.03629208 NaN 0.02809992
## 0.3274549163 0.03629208 NaN 0.02809992
## 0.3593813664 0.03629208 NaN 0.02809992
## 0.3944206059 0.03629208 NaN 0.02809992
## 0.4328761281 0.03629208 NaN 0.02809992
## 0.4750810162 0.03629208 NaN 0.02809992
## 0.5214008288 0.03629208 NaN 0.02809992
## 0.5722367659 0.03629208 NaN 0.02809992
## 0.6280291442 0.03629208 NaN 0.02809992
## 0.6892612104 0.03629208 NaN 0.02809992
## 0.7564633276 0.03629208 NaN 0.02809992
## 0.8302175681 0.03629208 NaN 0.02809992
## 0.9111627561 0.03629208 NaN 0.02809992
## 1.0000000000 0.03629208 NaN 0.02809992
##
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.0006428073.
## alpha lambda
## 21 1 0.0006428073
## alpha lambda RMSE Rsquared MAE RMSESD RsquaredSD MAESD
## 1 1 0.0001000000 0.03210399 0.2203605 0.02460799 0.0007935202 0.02145382 0.0003832270
## 2 1 0.0001097499 0.03208546 0.2210637 0.02459714 0.0007946932 0.02163842 0.0003836615
## 3 1 0.0001204504 0.03206600 0.2218114 0.02458601 0.0007962810 0.02184184 0.0003843099
## 4 1 0.0001321941 0.03204598 0.2225907 0.02457468 0.0007979939 0.02206900 0.0003853531
## 5 1 0.0001450829 0.03202534 0.2234058 0.02456317 0.0007996128 0.02231289 0.0003871173
## 6 1 0.0001592283 0.03200393 0.2242633 0.02455100 0.0008011079 0.02255857 0.0003883185
## 7 1 0.0001747528 0.03198148 0.2251790 0.02453829 0.0008028799 0.02283171 0.0003891233
## 8 1 0.0001917910 0.03195820 0.2261474 0.02452560 0.0008050571 0.02312546 0.0003893189
## 9 1 0.0002104904 0.03193449 0.2271545 0.02451431 0.0008070306 0.02344663 0.0003899836
## 10 1 0.0002310130 0.03191097 0.2281734 0.02450427 0.0008089887 0.02377454 0.0003917534
## 11 1 0.0002535364 0.03188749 0.2292155 0.02449398 0.0008109379 0.02410214 0.0003937996
## 12 1 0.0002782559 0.03186345 0.2303146 0.02448301 0.0008130628 0.02447112 0.0003976244
## 13 1 0.0003053856 0.03183960 0.2314472 0.02447198 0.0008159159 0.02493461 0.0004032098
## 14 1 0.0003351603 0.03181677 0.2325776 0.02446162 0.0008194352 0.02547903 0.0004090328
## 15 1 0.0003678380 0.03179488 0.2337070 0.02445123 0.0008241119 0.02602710 0.0004147960
## 16 1 0.0004037017 0.03177435 0.2348175 0.02444192 0.0008296260 0.02657489 0.0004200560
## 17 1 0.0004430621 0.03175595 0.2358781 0.02443428 0.0008347870 0.02714575 0.0004249118
## 18 1 0.0004862602 0.03174176 0.2367838 0.02443072 0.0008390173 0.02765790 0.0004306335
## 19 1 0.0005336699 0.03173181 0.2375271 0.02443121 0.0008423223 0.02808730 0.0004377113
## 20 1 0.0005857021 0.03172455 0.2381932 0.02443410 0.0008451508 0.02854276 0.0004436891
## 21 1 0.0006428073 0.03172352 0.2386037 0.02444169 0.0008481771 0.02896474 0.0004466905
## 22 1 0.0007054802 0.03172853 0.2387832 0.02445402 0.0008524801 0.02948467 0.0004492987
## 23 1 0.0007742637 0.03173894 0.2387736 0.02446982 0.0008564537 0.03005899 0.0004504762
## 24 1 0.0008497534 0.03175531 0.2385591 0.02449036 0.0008594646 0.03063964 0.0004516057
## 25 1 0.0009326033 0.03177989 0.2380225 0.02452007 0.0008614168 0.03127946 0.0004530471
## 26 1 0.0010235310 0.03181523 0.2370290 0.02455797 0.0008625583 0.03196639 0.0004545152
## 27 1 0.0011233240 0.03186001 0.2356493 0.02460371 0.0008628448 0.03262000 0.0004550758
## 28 1 0.0012328467 0.03191074 0.2340768 0.02465537 0.0008630417 0.03333725 0.0004549473
## 29 1 0.0013530478 0.03196471 0.2324801 0.02471097 0.0008629382 0.03404463 0.0004556688
## 30 1 0.0014849683 0.03202280 0.2308272 0.02476928 0.0008605413 0.03466240 0.0004575676
## 31 1 0.0016297508 0.03208949 0.2288767 0.02483467 0.0008560611 0.03518199 0.0004581944
## 32 1 0.0017886495 0.03216561 0.2265995 0.02490776 0.0008518197 0.03567584 0.0004586557
## 33 1 0.0019630407 0.03224586 0.2243528 0.02498667 0.0008486062 0.03612997 0.0004591083
## 34 1 0.0021544347 0.03232777 0.2223542 0.02507063 0.0008446877 0.03642562 0.0004554876
## 35 1 0.0023644894 0.03241517 0.2204414 0.02516029 0.0008394507 0.03653857 0.0004492301
## 36 1 0.0025950242 0.03250966 0.2186031 0.02525603 0.0008348327 0.03658424 0.0004415357
## 37 1 0.0028480359 0.03261733 0.2165526 0.02536051 0.0008303861 0.03671172 0.0004328230
## 38 1 0.0031257158 0.03274517 0.2138644 0.02547940 0.0008261283 0.03684089 0.0004248913
## 39 1 0.0034304693 0.03289849 0.2101266 0.02561715 0.0008222337 0.03689677 0.0004165862
## 40 1 0.0037649358 0.03308223 0.2048401 0.02577523 0.0008189187 0.03680612 0.0004105656
## 41 1 0.0041320124 0.03329608 0.1977560 0.02595337 0.0008169034 0.03631422 0.0004047119
## 42 1 0.0045348785 0.03352503 0.1899198 0.02614103 0.0008200889 0.03546705 0.0004007961
## 43 1 0.0049770236 0.03377661 0.1804757 0.02634229 0.0008280877 0.03392950 0.0003995561
## 44 1 0.0054622772 0.03407109 0.1666127 0.02657274 0.0008347953 0.03166086 0.0004032322
## 45 1 0.0059948425 0.03438039 0.1501826 0.02680649 0.0008307000 0.02970814 0.0004091012
## 46 1 0.0065793322 0.03466592 0.1347470 0.02701157 0.0008244113 0.02604380 0.0004136541
## 47 1 0.0072208090 0.03493247 0.1202722 0.02719315 0.0008294691 0.02162587 0.0004192989
## 48 1 0.0079248290 0.03515412 0.1114842 0.02734203 0.0008331495 0.02133947 0.0004203849
## 49 1 0.0086974900 0.03534371 0.1102603 0.02746367 0.0008460459 0.02129231 0.0004265407
## 50 1 0.0095454846 0.03556152 0.1102603 0.02760707 0.0008587567 0.02129231 0.0004380084
## 51 1 0.0104761575 0.03582216 0.1102603 0.02778115 0.0008728291 0.02129231 0.0004548419
## 52 1 0.0114975700 0.03613365 0.1102603 0.02799140 0.0008882768 0.02129231 0.0004734251
## 53 1 0.0126185688 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 54 1 0.0138488637 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 55 1 0.0151991108 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 56 1 0.0166810054 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 57 1 0.0183073828 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 58 1 0.0200923300 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 59 1 0.0220513074 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 60 1 0.0242012826 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 61 1 0.0265608778 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 62 1 0.0291505306 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 63 1 0.0319926714 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 64 1 0.0351119173 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 65 1 0.0385352859 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 66 1 0.0422924287 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 67 1 0.0464158883 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 68 1 0.0509413801 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 69 1 0.0559081018 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 70 1 0.0613590727 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 71 1 0.0673415066 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 72 1 0.0739072203 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 73 1 0.0811130831 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 74 1 0.0890215085 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 75 1 0.0977009957 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 76 1 0.1072267222 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 77 1 0.1176811952 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 78 1 0.1291549665 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 79 1 0.1417474163 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 80 1 0.1555676144 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 81 1 0.1707352647 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 82 1 0.1873817423 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 83 1 0.2056512308 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 84 1 0.2257019720 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 85 1 0.2477076356 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 86 1 0.2718588243 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 87 1 0.2983647240 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 88 1 0.3274549163 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 89 1 0.3593813664 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 90 1 0.3944206059 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 91 1 0.4328761281 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 92 1 0.4750810162 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 93 1 0.5214008288 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 94 1 0.5722367659 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 95 1 0.6280291442 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 96 1 0.6892612104 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 97 1 0.7564633276 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 98 1 0.8302175681 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 99 1 0.9111627561 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## 100 1 1.0000000000 0.03629208 NaN 0.02809992 0.0008692293 NA 0.0004701720
## Warning: Removed 48 rows containing missing values (geom_path).
## Warning: Removed 48 rows containing missing values (geom_point).
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## [1] "Coefficients"
## model.coef
## (Intercept) 1.990104e+00
## x4 -3.938393e-05
## x7 1.042935e-02
## x8 1.894951e-04
## x9 2.826698e-03
## x10 8.732409e-04
## x11 1.195661e+05
## x14 -2.010102e-05
## x16 6.711387e-04
## x17 1.211343e-03
## x19 3.839363e-05
## x21 7.976827e-05
## stat4 -1.658655e-04
## stat8 3.817524e-05
## stat10 -2.307130e-06
## stat13 -2.989942e-04
## stat14 -6.116428e-04
## stat18 -1.785535e-05
## stat22 -8.833802e-05
## stat23 8.911169e-05
## stat24 -5.623264e-05
## stat30 4.033001e-05
## stat41 -3.338599e-05
## stat46 9.728429e-07
## stat60 1.183025e-04
## stat61 -6.817801e-06
## stat82 1.157306e-05
## stat91 -7.335950e-05
## stat92 -1.575420e-04
## stat96 -1.332241e-04
## stat98 3.307608e-03
## stat100 3.439836e-04
## stat110 -2.952876e-03
## stat113 -4.345356e-05
## stat116 5.826270e-06
## stat129 6.897843e-06
## stat131 8.088172e-06
## stat146 -4.053011e-04
## stat147 -1.055210e-04
## stat148 -8.021059e-05
## stat149 -5.039992e-05
## stat156 6.560043e-05
## stat172 2.102640e-05
## stat195 1.211166e-05
## stat202 -1.041845e-04
## stat217 2.821412e-06
## x18.sqrt 2.500270e-02
# Evaluate the tuned LASSO (glmnet) model on the held-out test set,
# printing the prediction summary and test MSE.
# NOTE(review): unlike the stepwise evaluation call, no `id` argument is
# passed here — confirm that is intended.
if (algo.LASSO.caret == TRUE){
  test.model(
    model.LASSO.caret, data.test,
    method = 'glmnet', subopt = "LASSO",
    formula = formula,
    feature.names = feature.names,
    label.names = label.names,
    draw.limits = TRUE,
    transformation = t
  )
}
## [1] "Summary of predicted values: "
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 2.043 2.086 2.097 2.097 2.109 2.142
## [1] "glmnet LASSO Test MSE: 0.000981078338776019"
# Train a Least Angle Regression (lars) model through caret using its
# default tuning grid over the `fraction` parameter.
# isTRUE() instead of `== TRUE`: a missing params entry (`NULL == TRUE`
# -> logical(0)) would make `if` error; isTRUE(NULL) skips cleanly.
if (isTRUE(algo.LARS.caret)){
  set.seed(1)  # reproducible CV fold assignment
  # BUG FIX: subopt was the *string* 'NULL' rather than the value NULL.
  # Every sibling call passes NULL when there is no sub-option (leapSeq
  # evaluation) or a real label ('LASSO'); the string would leak into
  # downstream model labels as the literal text "NULL".
  returned = train.caret.glmselect(formula = formula
                       ,data = data.train
                       ,method = "lars"
                       ,subopt = NULL
                       ,feature.names = feature.names)
  model.LARS.caret = returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info = trainInfo, : There were missing values in resampled
## performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.374 on full training set
## Least Angle Regression
##
## 5584 samples
## 240 predictor
##
## Pre-processing: centered (240), scaled (240)
## Resampling: Cross-Validated (10 fold)
## Summary of sample sizes: 5026, 5026, 5026, 5025, 5025, 5026, ...
## Resampling results across tuning parameters:
##
## fraction RMSE Rsquared MAE
## 0.00000000 0.03629208 NaN 0.02809992
## 0.01010101 0.03587241 0.1102603 0.02781449
## 0.02020202 0.03549825 0.1102603 0.02756510
## 0.03030303 0.03517147 0.1103256 0.02735318
## 0.04040404 0.03488924 0.1231906 0.02716510
## 0.05050505 0.03463225 0.1362221 0.02698942
## 0.06060606 0.03439492 0.1492912 0.02681748
## 0.07070707 0.03417240 0.1611945 0.02664809
## 0.08080808 0.03395836 0.1725497 0.02648327
## 0.09090909 0.03375568 0.1816054 0.02632409
## 0.10101010 0.03356520 0.1886756 0.02617191
## 0.11111111 0.03339186 0.1943484 0.02603162
## 0.12121212 0.03322779 0.1999367 0.02589657
## 0.13131313 0.03307075 0.2051440 0.02576516
## 0.14141414 0.03292376 0.2093860 0.02563893
## 0.15151515 0.03278697 0.2128273 0.02551724
## 0.16161616 0.03266051 0.2156073 0.02540135
## 0.17171717 0.03254449 0.2178423 0.02529081
## 0.18181818 0.03244319 0.2198158 0.02519043
## 0.19191919 0.03235666 0.2215832 0.02510082
## 0.20202020 0.03227602 0.2235003 0.02501724
## 0.21212121 0.03220019 0.2255045 0.02494131
## 0.22222222 0.03212964 0.2275905 0.02487350
## 0.23232323 0.03206444 0.2295393 0.02480984
## 0.24242424 0.03200809 0.2312125 0.02475419
## 0.25252525 0.03195886 0.2326215 0.02470398
## 0.26262626 0.03191259 0.2340270 0.02465615
## 0.27272727 0.03186989 0.2353624 0.02461200
## 0.28282828 0.03183100 0.2365740 0.02457255
## 0.29292929 0.03179639 0.2376196 0.02453718
## 0.30303030 0.03177041 0.2383187 0.02450880
## 0.31313131 0.03175203 0.2387042 0.02448724
## 0.32323232 0.03174069 0.2388204 0.02447402
## 0.33333333 0.03173271 0.2388490 0.02446435
## 0.34343434 0.03172717 0.2388270 0.02445517
## 0.35353535 0.03172319 0.2387887 0.02444726
## 0.36363636 0.03172107 0.2386882 0.02444139
## 0.37373737 0.03172088 0.2385183 0.02443750
## 0.38383838 0.03172314 0.2382469 0.02443490
## 0.39393939 0.03172612 0.2379611 0.02443311
## 0.40404040 0.03172975 0.2376580 0.02443282
## 0.41414141 0.03173362 0.2373567 0.02443229
## 0.42424242 0.03173743 0.2370698 0.02443173
## 0.43434343 0.03174151 0.2367762 0.02443158
## 0.44444444 0.03174615 0.2364657 0.02443193
## 0.45454545 0.03175180 0.2361138 0.02443373
## 0.46464646 0.03175791 0.2357482 0.02443606
## 0.47474747 0.03176441 0.2353698 0.02443855
## 0.48484848 0.03177082 0.2350017 0.02444087
## 0.49494949 0.03177766 0.2346217 0.02444350
## 0.50505051 0.03178490 0.2342289 0.02444681
## 0.51515152 0.03179228 0.2338345 0.02445002
## 0.52525253 0.03179993 0.2334324 0.02445363
## 0.53535354 0.03180764 0.2330338 0.02445713
## 0.54545455 0.03181546 0.2326360 0.02446070
## 0.55555556 0.03182333 0.2322405 0.02446414
## 0.56565657 0.03183161 0.2318307 0.02446767
## 0.57575758 0.03184011 0.2314166 0.02447130
## 0.58585859 0.03184891 0.2309946 0.02447536
## 0.59595960 0.03185796 0.2305669 0.02447963
## 0.60606061 0.03186715 0.2301396 0.02448391
## 0.61616162 0.03187660 0.2297045 0.02448838
## 0.62626263 0.03188599 0.2292778 0.02449280
## 0.63636364 0.03189535 0.2288580 0.02449694
## 0.64646465 0.03190462 0.2284467 0.02450088
## 0.65656566 0.03191413 0.2280292 0.02450484
## 0.66666667 0.03192402 0.2275990 0.02450909
## 0.67676768 0.03193416 0.2271614 0.02451362
## 0.68686869 0.03194445 0.2267208 0.02451841
## 0.69696970 0.03195496 0.2262752 0.02452362
## 0.70707071 0.03196548 0.2258345 0.02452904
## 0.71717172 0.03197624 0.2253882 0.02453497
## 0.72727273 0.03198709 0.2249428 0.02454115
## 0.73737374 0.03199790 0.2245031 0.02454718
## 0.74747475 0.03200873 0.2240678 0.02455331
## 0.75757576 0.03201982 0.2236252 0.02455969
## 0.76767677 0.03203111 0.2231776 0.02456596
## 0.77777778 0.03204248 0.2227304 0.02457224
## 0.78787879 0.03205408 0.2222775 0.02457882
## 0.79797980 0.03206612 0.2218097 0.02458556
## 0.80808081 0.03207826 0.2213425 0.02459245
## 0.81818182 0.03209071 0.2208668 0.02459968
## 0.82828283 0.03210320 0.2203939 0.02460710
## 0.83838384 0.03211577 0.2199219 0.02461474
## 0.84848485 0.03212874 0.2194371 0.02462271
## 0.85858586 0.03214198 0.2189458 0.02463086
## 0.86868687 0.03215546 0.2184479 0.02463943
## 0.87878788 0.03216891 0.2179561 0.02464800
## 0.88888889 0.03218229 0.2174719 0.02465669
## 0.89898990 0.03219569 0.2169925 0.02466554
## 0.90909091 0.03220933 0.2165089 0.02467479
## 0.91919192 0.03222312 0.2160229 0.02468438
## 0.92929293 0.03223709 0.2155342 0.02469416
## 0.93939394 0.03225124 0.2150423 0.02470410
## 0.94949495 0.03226560 0.2145458 0.02471429
## 0.95959596 0.03227999 0.2140532 0.02472452
## 0.96969697 0.03229450 0.2135581 0.02473478
## 0.97979798 0.03230927 0.2130577 0.02474526
## 0.98989899 0.03232433 0.2125493 0.02475606
## 1.00000000 0.03233956 0.2120384 0.02476680
##
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.3737374.
## fraction
## 38 0.3737374
## Warning: Removed 1 rows containing missing values (geom_point).
## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## [1] "Coefficients"
## x4 x7 x8 x9 x10 x11 x14 x16
## -1.890684e-03 7.073069e-03 5.717077e-04 3.685365e-03 1.247871e-03 7.249935e-04 -5.296714e-05 1.375757e-03
## x17 x19 x21 stat4 stat8 stat10 stat13 stat14
## 1.620762e-03 1.307679e-04 8.436611e-04 -3.130447e-04 9.452336e-05 -5.074379e-06 -5.453694e-04 -1.099356e-03
## stat18 stat22 stat23 stat24 stat30 stat41 stat60 stat61
## -5.991246e-05 -1.799253e-04 1.825354e-04 -1.248379e-04 9.499803e-05 -8.389943e-05 2.343882e-04 -2.122117e-05
## stat82 stat91 stat92 stat96 stat98 stat100 stat110 stat113
## 3.883871e-05 -1.518104e-04 -2.979662e-04 -2.612562e-04 5.838975e-03 6.224400e-04 -5.175122e-03 -1.002359e-04
## stat116 stat129 stat131 stat146 stat147 stat148 stat149 stat156
## 1.750602e-05 2.240628e-05 2.665935e-05 -7.230611e-04 -2.096360e-04 -1.680554e-04 -1.151108e-04 1.433593e-04
## stat172 stat195 stat202 stat217 x18.sqrt
## 6.651632e-05 4.021286e-05 -2.031455e-04 6.389420e-06 1.142576e-02
# Evaluate the tuned LARS model on the held-out test set: prints a summary of
# the predictions and the test MSE, with prediction limits drawn. Mirrors the
# LASSO evaluation above; skipped when the LARS algorithm flag is disabled.
if (algo.LARS.caret) {
  test.model(
    model.LARS.caret,
    data.test,
    method = "lars",
    subopt = NULL,
    formula = formula,
    feature.names = feature.names,
    label.names = label.names,
    draw.limits = TRUE,
    transformation = t
  )
}
## [1] "Summary of predicted values: "
## Min. 1st Qu. Median Mean 3rd Qu. Max.
## 2.043 2.086 2.097 2.097 2.109 2.142
## [1] "lars Test MSE: 0.000981247811228031"
sessionInfo()
## R version 3.5.2 (2018-12-20)
## Platform: x86_64-w64-mingw32/x64 (64-bit)
## Running under: Windows 10 x64 (build 17763)
##
## Matrix products: default
##
## locale:
## [1] LC_COLLATE=English_United States.1252 LC_CTYPE=English_United States.1252 LC_MONETARY=English_United States.1252
## [4] LC_NUMERIC=C LC_TIME=English_United States.1252
##
## attached base packages:
## [1] parallel stats graphics grDevices utils datasets methods base
##
## other attached packages:
## [1] bindrcpp_0.2.2 knitr_1.21 htmltools_0.3.6 reshape2_1.4.3
## [5] lars_1.2 doParallel_1.0.14 iterators_1.0.10 caret_6.0-81
## [9] leaps_3.0 ggforce_0.1.3 rlist_0.4.6.1 car_3.0-2
## [13] carData_3.0-2 bestNormalize_1.3.0 scales_1.0.0 onewaytests_2.0
## [17] caTools_1.17.1.1 mosaic_1.5.0 mosaicData_0.17.0 ggformula_0.9.1
## [21] ggstance_0.3.1 lattice_0.20-38 DT_0.5 ggiraphExtra_0.2.9
## [25] ggiraph_0.6.0 investr_1.4.0 glmnet_2.0-16 foreach_1.4.4
## [29] Matrix_1.2-15 MASS_7.3-51.1 PerformanceAnalytics_1.5.2 xts_0.11-2
## [33] zoo_1.8-4 forcats_0.3.0 stringr_1.4.0 dplyr_0.7.8
## [37] purrr_0.3.0 readr_1.3.1 tidyr_0.8.2 tibble_2.0.1
## [41] ggplot2_3.1.0 tidyverse_1.2.1 usdm_1.1-18 raster_2.8-19
## [45] sp_1.3-1 pacman_0.5.0
##
## loaded via a namespace (and not attached):
## [1] readxl_1.2.0 backports_1.1.3 plyr_1.8.4 lazyeval_0.2.1 splines_3.5.2 mycor_0.1.1
## [7] crosstalk_1.0.0 leaflet_2.0.2 digest_0.6.18 magrittr_1.5 mosaicCore_0.6.0 openxlsx_4.1.0
## [13] recipes_0.1.4 modelr_0.1.3 gower_0.1.2 colorspace_1.4-0 rvest_0.3.2 ggrepel_0.8.0
## [19] haven_2.0.0 xfun_0.4 crayon_1.3.4 jsonlite_1.6 bindr_0.1.1 survival_2.43-3
## [25] glue_1.3.0 registry_0.5 gtable_0.2.0 ppcor_1.1 ipred_0.9-8 sjmisc_2.7.7
## [31] abind_1.4-5 rngtools_1.3.1 bibtex_0.4.2 Rcpp_1.0.0 xtable_1.8-3 units_0.6-2
## [37] foreign_0.8-71 stats4_3.5.2 lava_1.6.5 prodlim_2018.04.18 prediction_0.3.6.2 htmlwidgets_1.3
## [43] httr_1.4.0 RColorBrewer_1.1-2 pkgconfig_2.0.2 farver_1.1.0 nnet_7.3-12 labeling_0.3
## [49] tidyselect_0.2.5 rlang_0.3.1 later_0.8.0 munsell_0.5.0 cellranger_1.1.0 tools_3.5.2
## [55] cli_1.0.1 generics_0.0.2 moments_0.14 sjlabelled_1.0.16 broom_0.5.1 evaluate_0.13
## [61] ggdendro_0.1-20 yaml_2.2.0 ModelMetrics_1.2.2 zip_1.0.0 nlme_3.1-137 doRNG_1.7.1
## [67] mime_0.6 xml2_1.2.0 compiler_3.5.2 rstudioapi_0.9.0 curl_3.3 tweenr_1.0.1
## [73] stringi_1.3.1 highr_0.7 gdtools_0.1.7 stringdist_0.9.5.1 pillar_1.3.1 data.table_1.12.0
## [79] bitops_1.0-6 httpuv_1.4.5.1 R6_2.4.0 promises_1.0.1 gridExtra_2.3 rio_0.5.16
## [85] codetools_0.2-15 assertthat_0.2.0 pkgmaker_0.27 withr_2.1.2 nortest_1.0-4 mgcv_1.8-26
## [91] hms_0.4.2 rpart_4.1-13 quadprog_1.5-5 grid_3.5.2 timeDate_3043.102 class_7.3-14
## [97] rmarkdown_1.11 snakecase_0.9.2 shiny_1.2.0 lubridate_1.7.4